Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
hrydgard
GitHub Repository: hrydgard/ppsspp
Path: blob/master/Common/GPU/Vulkan/VulkanContext.cpp
3187 views
1
#define __STDC_LIMIT_MACROS
2
3
#include <cstdlib>
4
#include <cstdint>
5
#include <cstring>
6
#include <iostream>
7
8
#include "Core/Config.h"
9
#include "Common/System/System.h"
10
#include "Common/System/Display.h"
11
#include "Common/Log.h"
12
#include "Common/GPU/Shader.h"
13
#include "Common/GPU/Vulkan/VulkanContext.h"
14
#include "Common/GPU/Vulkan/VulkanDebug.h"
15
#include "Common/StringUtils.h"
16
17
#ifdef USE_CRT_DBG
18
#undef new
19
#endif
20
21
#include "ext/vma/vk_mem_alloc.h"
22
23
24
// Change this to 1, 2, and 3 to fake failures in a few places, so that
25
// we can test our fallback-to-GL code.
26
#define SIMULATE_VULKAN_FAILURE 0
27
28
#include "ext/glslang/SPIRV/GlslangToSpv.h"
29
30
#ifdef USE_CRT_DBG
31
#define new DBG_NEW
32
#endif
33
34
using namespace PPSSPP_VK;

// Global log/validation options, presumably consumed by the debug-utils
// callback code (see VulkanDebug.h) — confirm against VulkanDebug.cpp.
VulkanLogOptions g_LogOptions;

// Layers requested when validation is enabled (see CreateInstance).
// The modern Khronos meta-layer replaces the older per-feature layer list below.
static const char * const validationLayers[] = {
	"VK_LAYER_KHRONOS_validation",
	/*
	// For layers included in the Android NDK.
	"VK_LAYER_GOOGLE_threading",
	"VK_LAYER_LUNARG_parameter_validation",
	"VK_LAYER_LUNARG_core_validation",
	"VK_LAYER_LUNARG_image",
	"VK_LAYER_LUNARG_object_tracker",
	"VK_LAYER_LUNARG_swapchain",
	"VK_LAYER_GOOGLE_unique_objects",
	*/
};
51
52
std::string VulkanVendorString(uint32_t vendorId) {
53
switch (vendorId) {
54
case VULKAN_VENDOR_INTEL: return "Intel";
55
case VULKAN_VENDOR_NVIDIA: return "NVIDIA";
56
case VULKAN_VENDOR_AMD: return "AMD";
57
case VULKAN_VENDOR_ARM: return "ARM";
58
case VULKAN_VENDOR_QUALCOMM: return "Qualcomm";
59
case VULKAN_VENDOR_IMGTEC: return "Imagination";
60
case VULKAN_VENDOR_APPLE: return "Apple";
61
case VULKAN_VENDOR_MESA: return "Mesa";
62
default:
63
return StringFromFormat("%08x", vendorId);
64
}
65
}
66
67
// Human-readable name for a VkPresentModeKHR, for logging.
const char *VulkanPresentModeToString(VkPresentModeKHR presentMode) {
	switch (presentMode) {
	case VK_PRESENT_MODE_FIFO_KHR:
		return "FIFO";
	case VK_PRESENT_MODE_FIFO_RELAXED_KHR:
		return "FIFO_RELAXED";
	case VK_PRESENT_MODE_IMMEDIATE_KHR:
		return "IMMEDIATE";
	case VK_PRESENT_MODE_MAILBOX_KHR:
		return "MAILBOX";
	case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR:
		return "SHARED_DEMAND_REFRESH_KHR";
	case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR:
		return "SHARED_CONTINUOUS_REFRESH_KHR";
	default:
		return "UNKNOWN";
	}
}
78
79
// Human-readable name for the subset of VkImageLayout values we care to log.
// Anything not listed comes back as "OTHER".
const char *VulkanImageLayoutToString(VkImageLayout imageLayout) {
	switch (imageLayout) {
	case VK_IMAGE_LAYOUT_UNDEFINED:                        return "UNDEFINED";
	case VK_IMAGE_LAYOUT_PREINITIALIZED:                   return "PREINITIALIZED";
	case VK_IMAGE_LAYOUT_GENERAL:                          return "GENERAL";
	case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:             return "TRANSFER_SRC_OPTIMAL";
	case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:             return "TRANSFER_DST_OPTIMAL";
	case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:         return "SHADER_READ_ONLY_OPTIMAL";
	case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:         return "COLOR_ATTACHMENT_OPTIMAL";
	case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: return "DEPTH_STENCIL_ATTACHMENT_OPTIMAL";
	default:                                               return "OTHER";
	}
}
92
93
// Construction is intentionally trivial; all real setup happens in
// CreateInstance() and CreateDevice().
VulkanContext::VulkanContext() {}
96
97
// Creates the VkInstance: picks an API version, selects surface/debug/feature
// extensions per platform, then enumerates physical devices and caches their
// properties. Returns VK_SUCCESS or an error; on failure init_error_ holds a
// human-readable description and instance_ is left null.
VkResult VulkanContext::CreateInstance(const CreateInfo &info) {
	if (!vkCreateInstance) {
		// The loader didn't resolve even the base entry point - no Vulkan at all.
		init_error_ = "Vulkan not loaded - can't create instance";
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	if (info.flags & VulkanInitFlags::DISABLE_IMPLICIT_LAYERS) {
		// Tell the loader to skip implicit layers (overlays etc.), which are a
		// common source of crashes. See:
		// https://github.com/KhronosGroup/Vulkan-Loader/blob/main/docs/LoaderDebugging.md
#if PPSSPP_PLATFORM(WINDOWS)
#if !PPSSPP_PLATFORM(UWP)
		// Windows uses _putenv_s
		_putenv_s("VK_LOADER_LAYERS_DISABLE", "~implicit~");
#endif
#else
		// POSIX: use setenv
		setenv("VK_LOADER_LAYERS_DISABLE", "~implicit~", 1); // overwrite = 1
#endif
	}

	// Check which Vulkan version we should request.
	// We clamp at 1.4 so we never ask for a version newer than the code has been
	// tested against.
	vulkanInstanceApiVersion_ = VK_API_VERSION_1_0;
	if (vkEnumerateInstanceVersion) {
		vkEnumerateInstanceVersion(&vulkanInstanceApiVersion_);
		vulkanInstanceApiVersion_ &= 0xFFFFF000; // Remove patch version.
		vulkanInstanceApiVersion_ = std::min(VK_API_VERSION_1_4, vulkanInstanceApiVersion_);
		std::string versionString = FormatAPIVersion(vulkanInstanceApiVersion_);
		INFO_LOG(Log::G3D, "Detected Vulkan API version: %s", versionString.c_str());
	}

	instance_layer_names_.clear();
	device_layer_names_.clear();

	// We can get the list of layers and extensions without an instance so we can use this information
	// to enable the extensions we need that are available.
	GetInstanceLayerProperties();
	GetInstanceLayerExtensionList(nullptr, instance_extension_properties_);

	if (!IsInstanceExtensionAvailable(VK_KHR_SURFACE_EXTENSION_NAME)) {
		// Cannot create a Vulkan display without VK_KHR_SURFACE_EXTENSION.
		init_error_ = "Vulkan not loaded - no surface extension";
		return VK_ERROR_INITIALIZATION_FAILED;
	}
	flags_ = info.flags;

	// List extensions to try to enable.
	instance_extensions_enabled_.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
#ifdef _WIN32
	instance_extensions_enabled_.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
#elif defined(__ANDROID__)
	instance_extensions_enabled_.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
#else
	// Other platforms: enable whichever windowing-system surface extensions
	// were compiled in and are actually available at runtime.
#if defined(VK_USE_PLATFORM_XLIB_KHR)
	if (IsInstanceExtensionAvailable(VK_KHR_XLIB_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
	}
#endif
//#if defined(VK_USE_PLATFORM_XCB_KHR)
//	instance_extensions_enabled_.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
//#endif
#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
	if (IsInstanceExtensionAvailable(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
	}
#endif
#if defined(VK_USE_PLATFORM_DISPLAY_KHR)
	if (IsInstanceExtensionAvailable(VK_KHR_DISPLAY_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_DISPLAY_EXTENSION_NAME);
	}
#endif
#if defined(VK_USE_PLATFORM_METAL_EXT)
	if (IsInstanceExtensionAvailable(VK_EXT_METAL_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_EXT_METAL_SURFACE_EXTENSION_NAME);
	}
#endif
#endif

	// Validation is skipped when a custom (user-supplied) driver is configured.
	if ((flags_ & VulkanInitFlags::VALIDATE) && g_Config.sCustomDriver.empty()) {
		if (IsInstanceExtensionAvailable(VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) {
			// Enable the validation layers
			for (size_t i = 0; i < ARRAY_SIZE(validationLayers); i++) {
				instance_layer_names_.push_back(validationLayers[i]);
				device_layer_names_.push_back(validationLayers[i]);
			}
			instance_extensions_enabled_.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
			extensionsLookup_.EXT_debug_utils = true;
			INFO_LOG(Log::G3D, "Vulkan debug_utils validation enabled.");
		} else {
			// Drop the flag so later code doesn't expect a messenger to exist.
			ERROR_LOG(Log::G3D, "Validation layer extension not available - not enabling Vulkan validation.");
			flags_ &= ~VulkanInitFlags::VALIDATE;
		}
	}

	// Uncomment to test GPU backend fallback
	// abort();

	if (EnableInstanceExtension(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, VK_API_VERSION_1_1)) {
		extensionsLookup_.KHR_get_physical_device_properties2 = true;
	}

	if (EnableInstanceExtension(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME, 0)) {
		extensionsLookup_.EXT_swapchain_colorspace = true;
	}
#if PPSSPP_PLATFORM(IOS_APP_STORE)
	// Portability (MoltenVK) devices are only enumerated when this extension
	// plus the create-flag below are both present.
	if (EnableInstanceExtension(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME, 0)) {

	}
#endif

	// Validate that all the instance extensions we ask for are actually available.
	for (auto ext : instance_extensions_enabled_) {
		if (!IsInstanceExtensionAvailable(ext))
			WARN_LOG(Log::G3D, "WARNING: Does not seem that instance extension '%s' is available. Trying to proceed anyway.", ext);
	}

	VkApplicationInfo app_info{ VK_STRUCTURE_TYPE_APPLICATION_INFO };
	app_info.pApplicationName = info.app_name;
	app_info.applicationVersion = info.app_ver;
	app_info.pEngineName = info.app_name;
	// Let's increment this when we make major engine/context changes.
	app_info.engineVersion = 2;
	app_info.apiVersion = vulkanInstanceApiVersion_;

	VkInstanceCreateInfo inst_info{ VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO };
	inst_info.flags = 0;
	inst_info.pApplicationInfo = &app_info;
	inst_info.enabledLayerCount = (uint32_t)instance_layer_names_.size();
	inst_info.ppEnabledLayerNames = instance_layer_names_.size() ? instance_layer_names_.data() : nullptr;
	inst_info.enabledExtensionCount = (uint32_t)instance_extensions_enabled_.size();
	inst_info.ppEnabledExtensionNames = instance_extensions_enabled_.size() ? instance_extensions_enabled_.data() : nullptr;

#if PPSSPP_PLATFORM(IOS_APP_STORE)
	inst_info.flags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
#endif

#if SIMULATE_VULKAN_FAILURE == 2
	// Fault injection: pretend instance creation failed (see top of file).
	VkResult res = VK_ERROR_INCOMPATIBLE_DRIVER;
#else
	VkResult res = vkCreateInstance(&inst_info, nullptr, &instance_);
#endif
	if (res != VK_SUCCESS) {
		if (res == VK_ERROR_LAYER_NOT_PRESENT) {
			WARN_LOG(Log::G3D, "Validation on but instance layer not available - dropping layers");
			// Drop the validation layers and try again.
			instance_layer_names_.clear();
			device_layer_names_.clear();
			inst_info.enabledLayerCount = 0;
			inst_info.ppEnabledLayerNames = nullptr;
			res = vkCreateInstance(&inst_info, nullptr, &instance_);
			if (res != VK_SUCCESS)
				ERROR_LOG(Log::G3D, "Failed to create instance even without validation: %d", res);
		} else {
			ERROR_LOG(Log::G3D, "Failed to create instance : %d", res);
		}
	}
	if (res != VK_SUCCESS) {
		init_error_ = "Failed to create Vulkan instance";
		return res;
	}

	// Resolve the instance-level entry points before calling anything else.
	VulkanLoadInstanceFunctions(instance_, extensionsLookup_, vulkanInstanceApiVersion_);
	if (!CheckLayers(instance_layer_properties_, instance_layer_names_)) {
		// Non-fatal: we proceed without the missing layer(s).
		WARN_LOG(Log::G3D, "CheckLayers for instance failed");
		// init_error_ = "Failed to validate instance layers";
		// return;
	}

	uint32_t gpu_count = 1;
#if SIMULATE_VULKAN_FAILURE == 3
	// Fault injection: pretend no GPUs were found.
	gpu_count = 0;
#else
	res = vkEnumeratePhysicalDevices(instance_, &gpu_count, nullptr);
#endif
	if (gpu_count <= 0) {
		ERROR_LOG(Log::G3D, "Vulkan driver found but no supported GPU is available");
		init_error_ = "No Vulkan physical devices found";
		vkDestroyInstance(instance_, nullptr);
		instance_ = nullptr;
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	_dbg_assert_(gpu_count > 0);
	physical_devices_.resize(gpu_count);
	physicalDeviceProperties_.resize(gpu_count);
	res = vkEnumeratePhysicalDevices(instance_, &gpu_count, physical_devices_.data());
	if (res != VK_SUCCESS) {
		init_error_ = "Failed to enumerate physical devices";
		vkDestroyInstance(instance_, nullptr);
		instance_ = nullptr;
		return res;
	}

	if (extensionsLookup_.KHR_get_physical_device_properties2 && vkGetPhysicalDeviceProperties2) {
		// Extended query: pick up push-descriptor, external-host-memory and
		// depth/stencil-resolve properties via the pNext chain in one call.
		for (uint32_t i = 0; i < gpu_count; i++) {
			VkPhysicalDeviceProperties2 props2{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2};
			VkPhysicalDevicePushDescriptorPropertiesKHR pushProps{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR};
			VkPhysicalDeviceExternalMemoryHostPropertiesEXT extHostMemProps{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT};
			VkPhysicalDeviceDepthStencilResolveProperties depthStencilResolveProps{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES};
			ChainStruct(props2, &pushProps);
			ChainStruct(props2, &extHostMemProps);
			ChainStruct(props2, &depthStencilResolveProps);
			vkGetPhysicalDeviceProperties2(physical_devices_[i], &props2);

			// Don't want bad pointers sitting around. Probably not really necessary.
			props2.pNext = nullptr;
			pushProps.pNext = nullptr;
			extHostMemProps.pNext = nullptr;
			depthStencilResolveProps.pNext = nullptr;
			physicalDeviceProperties_[i].properties = props2.properties;
			physicalDeviceProperties_[i].pushDescriptorProperties = pushProps;
			physicalDeviceProperties_[i].externalMemoryHostProperties = extHostMemProps;
			physicalDeviceProperties_[i].depthStencilResolve = depthStencilResolveProps;
		}
	} else {
		// Vulkan 1.0 fallback: core properties only.
		for (uint32_t i = 0; i < gpu_count; i++) {
			vkGetPhysicalDeviceProperties(physical_devices_[i], &physicalDeviceProperties_[i].properties);
		}
	}

	if (extensionsLookup_.EXT_debug_utils) {
		_assert_(vkCreateDebugUtilsMessengerEXT != nullptr);
		InitDebugUtilsCallback();
	}

	return VK_SUCCESS;
}
323
324
// The instance must already have been torn down via DestroyInstance() before
// the context is destroyed (checked in debug builds only).
VulkanContext::~VulkanContext() {
	_dbg_assert_(instance_ == VK_NULL_HANDLE);
}
327
328
void VulkanContext::DestroyInstance() {
329
if (extensionsLookup_.EXT_debug_utils) {
330
while (utils_callbacks.size() > 0) {
331
vkDestroyDebugUtilsMessengerEXT(instance_, utils_callbacks.back(), nullptr);
332
utils_callbacks.pop_back();
333
}
334
}
335
336
vkDestroyInstance(instance_, nullptr);
337
VulkanFree();
338
instance_ = VK_NULL_HANDLE;
339
}
340
341
// Starts a new CPU frame: performs this frame slot's pending deletes, then
// kicks off the GPU profiler if one is active.
void VulkanContext::BeginFrame(VkCommandBuffer firstCommandBuffer) {
	FrameData &frameData = frame_[curFrame_];
	// Process pending deletes queued for this frame slot.
	frameData.deleteList.PerformDeletes(this, allocator_);
	// firstCommandBuffer is VK_NULL_HANDLE when the profiler is disabled.
	if (firstCommandBuffer != VK_NULL_HANDLE) {
		frameData.profiler.BeginFrame(this, firstCommandBuffer);
	}
}
350
351
void VulkanContext::EndFrame() {
352
frame_[curFrame_].deleteList.Take(globalDeleteList_);
353
curFrame_++;
354
if (curFrame_ >= inflightFrames_) {
355
curFrame_ = 0;
356
}
357
}
358
359
void VulkanContext::UpdateInflightFrames(int n) {
360
_dbg_assert_(n >= 1 && n <= MAX_INFLIGHT_FRAMES);
361
inflightFrames_ = n;
362
if (curFrame_ >= inflightFrames_) {
363
curFrame_ = 0;
364
}
365
}
366
367
// Blocks until the graphics queue has drained completely. Expensive full-stall
// synchronization, hence the warning below.
void VulkanContext::WaitUntilQueueIdle() {
	// Should almost never be used
	vkQueueWaitIdle(gfx_queue_);
}
371
372
bool VulkanContext::MemoryTypeFromProperties(uint32_t typeBits, VkFlags requirements_mask, uint32_t *typeIndex) {
373
// Search memtypes to find first index with those properties
374
for (uint32_t i = 0; i < 32; i++) {
375
if ((typeBits & 1) == 1) {
376
// Type is available, does it match user properties?
377
if ((memory_properties_.memoryTypes[i].propertyFlags & requirements_mask) == requirements_mask) {
378
*typeIndex = i;
379
return true;
380
}
381
}
382
typeBits >>= 1;
383
}
384
// No memory types matched, return failure
385
return false;
386
}
387
388
void VulkanContext::DestroySwapchain() {
389
if (swapchain_ != VK_NULL_HANDLE) {
390
vkDestroySwapchainKHR(device_, swapchain_, nullptr);
391
swapchain_ = VK_NULL_HANDLE;
392
}
393
swapchainInited_ = false;
394
}
395
396
void VulkanContext::DestroySurface() {
397
if (surface_ != VK_NULL_HANDLE) {
398
vkDestroySurfaceKHR(instance_, surface_, nullptr);
399
surface_ = VK_NULL_HANDLE;
400
}
401
}
402
403
// Enumerates the instance extensions exposed by the given layer
// (layerName == nullptr queries the core/implicit set). Retries while the
// loader reports VK_INCOMPLETE, i.e. the count changed between calls.
VkResult VulkanContext::GetInstanceLayerExtensionList(const char *layerName, std::vector<VkExtensionProperties> &extensions) {
	VkResult res;
	do {
		uint32_t count = 0;
		res = vkEnumerateInstanceExtensionProperties(layerName, &count, nullptr);
		if (res != VK_SUCCESS)
			return res;
		if (count == 0)
			return VK_SUCCESS;
		extensions.resize(count);
		res = vkEnumerateInstanceExtensionProperties(layerName, &count, extensions.data());
	} while (res == VK_INCOMPLETE);
	return res;
}
417
418
// Enumerates all instance layers, then fetches each layer's extension list
// into instance_layer_properties_.
//
// The layer set can (rarely) change between the count query and the data
// fetch - e.g. something got installed in between. The loader signals this
// with VK_INCOMPLETE and an updated count, so we just loop until the two
// calls agree.
VkResult VulkanContext::GetInstanceLayerProperties() {
	uint32_t layerCount = 0;
	std::vector<VkLayerProperties> props;
	VkResult res;
	do {
		res = vkEnumerateInstanceLayerProperties(&layerCount, nullptr);
		if (res != VK_SUCCESS)
			return res;
		if (layerCount == 0)
			return VK_SUCCESS;
		props.resize(layerCount);
		res = vkEnumerateInstanceLayerProperties(&layerCount, props.data());
	} while (res == VK_INCOMPLETE);

	// Now gather the extension list for each instance layer.
	for (uint32_t i = 0; i < layerCount; i++) {
		LayerProperties layerProps;
		layerProps.properties = props[i];
		res = GetInstanceLayerExtensionList(layerProps.properties.layerName, layerProps.extensions);
		if (res != VK_SUCCESS)
			return res;
		instance_layer_properties_.push_back(layerProps);
	}
	return res;
}
455
456
// Pass layerName == nullptr to get the extension list for the device.
457
VkResult VulkanContext::GetDeviceLayerExtensionList(const char *layerName, std::vector<VkExtensionProperties> &extensions) {
458
VkResult res;
459
do {
460
uint32_t device_extension_count;
461
res = vkEnumerateDeviceExtensionProperties(physical_devices_[physical_device_], layerName, &device_extension_count, nullptr);
462
if (res != VK_SUCCESS)
463
return res;
464
if (!device_extension_count)
465
return VK_SUCCESS;
466
extensions.resize(device_extension_count);
467
res = vkEnumerateDeviceExtensionProperties(physical_devices_[physical_device_], layerName, &device_extension_count, extensions.data());
468
} while (res == VK_INCOMPLETE);
469
return res;
470
}
471
472
// Enumerates the layers of the selected physical device, then fetches each
// layer's extension list into device_layer_properties_.
//
// As with the instance variant, the layer set can change between the count
// query and the data fetch; the loader signals that with VK_INCOMPLETE and an
// updated count, so we loop until the calls agree.
VkResult VulkanContext::GetDeviceLayerProperties() {
	uint32_t layerCount = 0;
	std::vector<VkLayerProperties> props;
	VkResult res;
	do {
		res = vkEnumerateDeviceLayerProperties(physical_devices_[physical_device_], &layerCount, nullptr);
		if (res != VK_SUCCESS)
			return res;
		if (layerCount == 0)
			return VK_SUCCESS;
		props.resize(layerCount);
		res = vkEnumerateDeviceLayerProperties(physical_devices_[physical_device_], &layerCount, props.data());
	} while (res == VK_INCOMPLETE);

	// Gather the list of extensions for each device layer.
	for (uint32_t i = 0; i < layerCount; i++) {
		LayerProperties layerProps;
		layerProps.properties = props[i];
		res = GetDeviceLayerExtensionList(layerProps.properties.layerName, layerProps.extensions);
		if (res != VK_SUCCESS)
			return res;
		device_layer_properties_.push_back(layerProps);
	}
	return res;
}
509
510
// Returns true if all layer names specified in check_names can be found in given layer properties.
511
bool VulkanContext::CheckLayers(const std::vector<LayerProperties> &layer_props, const std::vector<const char *> &layer_names) const {
512
uint32_t check_count = (uint32_t)layer_names.size();
513
uint32_t layer_count = (uint32_t)layer_props.size();
514
for (uint32_t i = 0; i < check_count; i++) {
515
bool found = false;
516
for (uint32_t j = 0; j < layer_count; j++) {
517
if (!strcmp(layer_names[i], layer_props[j].properties.layerName)) {
518
found = true;
519
}
520
}
521
if (!found) {
522
std::cout << "Cannot find layer: " << layer_names[i] << std::endl;
523
return false;
524
}
525
}
526
return true;
527
}
528
529
int VulkanContext::GetPhysicalDeviceByName(std::string_view name) const {
530
for (size_t i = 0; i < physical_devices_.size(); i++) {
531
if (equals(physicalDeviceProperties_[i].properties.deviceName, name))
532
return (int)i;
533
}
534
return -1;
535
}
536
537
int VulkanContext::GetBestPhysicalDevice() const {
538
// Rules: Prefer discrete over embedded.
539
// Prefer nVidia over Intel.
540
541
int maxScore = -1;
542
int best = -1;
543
544
for (size_t i = 0; i < physical_devices_.size(); i++) {
545
int score = 0;
546
VkPhysicalDeviceProperties props;
547
vkGetPhysicalDeviceProperties(physical_devices_[i], &props);
548
switch (props.deviceType) {
549
case VK_PHYSICAL_DEVICE_TYPE_CPU:
550
score += 1;
551
break;
552
case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
553
score += 2;
554
break;
555
case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
556
score += 20;
557
break;
558
case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
559
score += 10;
560
break;
561
default:
562
break;
563
}
564
if (props.vendorID == VULKAN_VENDOR_AMD) {
565
score += 5;
566
} else if (props.vendorID == VULKAN_VENDOR_NVIDIA) {
567
score += 5;
568
}
569
if (score > maxScore) {
570
best = (int)i;
571
maxScore = score;
572
}
573
}
574
return best;
575
}
576
577
bool VulkanContext::EnableDeviceExtension(const char *extension, uint32_t coreVersion) {
578
if (coreVersion != 0 && vulkanDeviceApiVersion_ >= coreVersion) {
579
return true;
580
}
581
for (auto &iter : device_extension_properties_) {
582
if (!strcmp(iter.extensionName, extension)) {
583
device_extensions_enabled_.push_back(extension);
584
return true;
585
}
586
}
587
return false;
588
}
589
590
bool VulkanContext::EnableInstanceExtension(const char *extension, uint32_t coreVersion) {
591
if (coreVersion != 0 && vulkanInstanceApiVersion_ >= coreVersion) {
592
return true;
593
}
594
for (auto &iter : instance_extension_properties_) {
595
if (!strcmp(iter.extensionName, extension)) {
596
instance_extensions_enabled_.push_back(extension);
597
return true;
598
}
599
}
600
return false;
601
}
602
603
VkResult VulkanContext::CreateDevice(int physical_device) {
604
physical_device_ = physical_device;
605
INFO_LOG(Log::G3D, "Chose physical device %d: %s", physical_device, physicalDeviceProperties_[physical_device].properties.deviceName);
606
607
vulkanDeviceApiVersion_ = physicalDeviceProperties_[physical_device].properties.apiVersion;
608
609
GetDeviceLayerProperties();
610
if (!CheckLayers(device_layer_properties_, device_layer_names_)) {
611
WARN_LOG(Log::G3D, "CheckLayers for device %d failed", physical_device);
612
}
613
614
vkGetPhysicalDeviceQueueFamilyProperties(physical_devices_[physical_device_], &queue_count, nullptr);
615
_dbg_assert_(queue_count >= 1);
616
617
queueFamilyProperties_.resize(queue_count);
618
vkGetPhysicalDeviceQueueFamilyProperties(physical_devices_[physical_device_], &queue_count, queueFamilyProperties_.data());
619
_dbg_assert_(queue_count >= 1);
620
621
// Detect preferred depth/stencil formats, in this order. All supported devices will support at least one of these.
622
static const VkFormat depthStencilFormats[] = {
623
VK_FORMAT_D24_UNORM_S8_UINT,
624
VK_FORMAT_D32_SFLOAT_S8_UINT,
625
VK_FORMAT_D16_UNORM_S8_UINT,
626
};
627
628
deviceInfo_.preferredDepthStencilFormat = VK_FORMAT_UNDEFINED;
629
for (size_t i = 0; i < ARRAY_SIZE(depthStencilFormats); i++) {
630
VkFormatProperties props;
631
vkGetPhysicalDeviceFormatProperties(physical_devices_[physical_device_], depthStencilFormats[i], &props);
632
if (props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
633
deviceInfo_.preferredDepthStencilFormat = depthStencilFormats[i];
634
break;
635
}
636
}
637
638
_assert_msg_(deviceInfo_.preferredDepthStencilFormat != VK_FORMAT_UNDEFINED, "Could not find a usable depth stencil format.");
639
VkFormatProperties preferredProps;
640
vkGetPhysicalDeviceFormatProperties(physical_devices_[physical_device_], deviceInfo_.preferredDepthStencilFormat, &preferredProps);
641
if ((preferredProps.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_SRC_BIT) &&
642
(preferredProps.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_DST_BIT)) {
643
deviceInfo_.canBlitToPreferredDepthStencilFormat = true;
644
}
645
646
// This is as good a place as any to do this. Though, we don't use this much anymore after we added
647
// support for VMA.
648
vkGetPhysicalDeviceMemoryProperties(physical_devices_[physical_device_], &memory_properties_);
649
DEBUG_LOG(Log::G3D, "Memory Types (%d):", memory_properties_.memoryTypeCount);
650
for (int i = 0; i < (int)memory_properties_.memoryTypeCount; i++) {
651
// Don't bother printing dummy memory types.
652
if (!memory_properties_.memoryTypes[i].propertyFlags)
653
continue;
654
DEBUG_LOG(Log::G3D, " %d: Heap %d; Flags: %s%s%s%s ", i, memory_properties_.memoryTypes[i].heapIndex,
655
(memory_properties_.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) ? "DEVICE_LOCAL " : "",
656
(memory_properties_.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) ? "HOST_VISIBLE " : "",
657
(memory_properties_.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) ? "HOST_CACHED " : "",
658
(memory_properties_.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) ? "HOST_COHERENT " : "");
659
}
660
661
GetDeviceLayerExtensionList(nullptr, device_extension_properties_);
662
663
device_extensions_enabled_.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
664
665
if (!init_error_.empty() || physical_device_ < 0) {
666
ERROR_LOG(Log::G3D, "Vulkan init failed: %s", init_error_.c_str());
667
return VK_ERROR_INITIALIZATION_FAILED;
668
}
669
670
VkDeviceQueueCreateInfo queue_info{ VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO };
671
float queue_priorities[1] = { 1.0f };
672
queue_info.queueCount = 1;
673
queue_info.pQueuePriorities = queue_priorities;
674
bool found = false;
675
for (int i = 0; i < (int)queue_count; i++) {
676
if (queueFamilyProperties_[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
677
queue_info.queueFamilyIndex = i;
678
found = true;
679
break;
680
}
681
}
682
_dbg_assert_(found);
683
684
// TODO: A lot of these are on by default in later Vulkan versions, should check for that, technically.
685
extensionsLookup_.KHR_maintenance1 = EnableDeviceExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME, VK_API_VERSION_1_1);
686
extensionsLookup_.KHR_maintenance2 = EnableDeviceExtension(VK_KHR_MAINTENANCE2_EXTENSION_NAME, VK_API_VERSION_1_1);
687
extensionsLookup_.KHR_maintenance3 = EnableDeviceExtension(VK_KHR_MAINTENANCE3_EXTENSION_NAME, VK_API_VERSION_1_1);
688
extensionsLookup_.KHR_maintenance4 = EnableDeviceExtension("VK_KHR_maintenance4", VK_API_VERSION_1_3);
689
extensionsLookup_.KHR_multiview = EnableDeviceExtension(VK_KHR_MULTIVIEW_EXTENSION_NAME, VK_API_VERSION_1_1);
690
691
if (EnableDeviceExtension(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, VK_API_VERSION_1_1)) {
692
extensionsLookup_.KHR_get_memory_requirements2 = true;
693
extensionsLookup_.KHR_dedicated_allocation = EnableDeviceExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, VK_API_VERSION_1_1);
694
}
695
if (EnableDeviceExtension(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME, VK_API_VERSION_1_2)) {
696
extensionsLookup_.KHR_create_renderpass2 = true;
697
extensionsLookup_.KHR_depth_stencil_resolve = EnableDeviceExtension(VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME, VK_API_VERSION_1_2);
698
}
699
700
extensionsLookup_.EXT_shader_stencil_export = EnableDeviceExtension(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, 0);
701
extensionsLookup_.EXT_fragment_shader_interlock = EnableDeviceExtension(VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME, 0);
702
extensionsLookup_.ARM_rasterization_order_attachment_access = EnableDeviceExtension(VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME, 0);
703
704
#if !PPSSPP_PLATFORM(MAC) && !PPSSPP_PLATFORM(IOS)
705
extensionsLookup_.GOOGLE_display_timing = EnableDeviceExtension(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME, 0);
706
#endif
707
if (!extensionsLookup_.GOOGLE_display_timing) {
708
extensionsLookup_.KHR_present_id = EnableDeviceExtension(VK_KHR_PRESENT_ID_EXTENSION_NAME, 0);
709
extensionsLookup_.KHR_present_wait = EnableDeviceExtension(VK_KHR_PRESENT_WAIT_EXTENSION_NAME, 0);
710
}
711
712
extensionsLookup_.EXT_provoking_vertex = EnableDeviceExtension(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME, 0);
713
714
// Optional features
715
if (extensionsLookup_.KHR_get_physical_device_properties2 && vkGetPhysicalDeviceFeatures2) {
716
VkPhysicalDeviceFeatures2 features2{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR };
717
// Add to chain even if not supported, GetPhysicalDeviceFeatures is supposed to ignore unknown structs.
718
VkPhysicalDeviceMultiviewFeatures multiViewFeatures{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES };
719
VkPhysicalDevicePresentWaitFeaturesKHR presentWaitFeatures{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR };
720
VkPhysicalDevicePresentIdFeaturesKHR presentIdFeatures{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR };
721
VkPhysicalDeviceProvokingVertexFeaturesEXT provokingVertexFeatures{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT };
722
723
ChainStruct(features2, &multiViewFeatures);
724
if (extensionsLookup_.KHR_present_wait) {
725
ChainStruct(features2, &presentWaitFeatures);
726
}
727
if (extensionsLookup_.KHR_present_id) {
728
ChainStruct(features2, &presentIdFeatures);
729
}
730
if (extensionsLookup_.EXT_provoking_vertex) {
731
ChainStruct(features2, &provokingVertexFeatures);
732
}
733
vkGetPhysicalDeviceFeatures2(physical_devices_[physical_device_], &features2);
734
deviceFeatures_.available.standard = features2.features;
735
deviceFeatures_.available.multiview = multiViewFeatures;
736
if (extensionsLookup_.KHR_present_wait) {
737
deviceFeatures_.available.presentWait = presentWaitFeatures;
738
}
739
if (extensionsLookup_.KHR_present_id) {
740
deviceFeatures_.available.presentId = presentIdFeatures;
741
}
742
if (extensionsLookup_.EXT_provoking_vertex) {
743
deviceFeatures_.available.provokingVertex = provokingVertexFeatures;
744
}
745
} else {
746
vkGetPhysicalDeviceFeatures(physical_devices_[physical_device_], &deviceFeatures_.available.standard);
747
deviceFeatures_.available.multiview = {};
748
}
749
750
deviceFeatures_.enabled = {};
751
// Enable a few safe ones if they are available.
752
deviceFeatures_.enabled.standard.dualSrcBlend = deviceFeatures_.available.standard.dualSrcBlend;
753
deviceFeatures_.enabled.standard.logicOp = deviceFeatures_.available.standard.logicOp;
754
deviceFeatures_.enabled.standard.depthClamp = deviceFeatures_.available.standard.depthClamp;
755
deviceFeatures_.enabled.standard.depthBounds = deviceFeatures_.available.standard.depthBounds;
756
deviceFeatures_.enabled.standard.samplerAnisotropy = deviceFeatures_.available.standard.samplerAnisotropy;
757
deviceFeatures_.enabled.standard.shaderClipDistance = deviceFeatures_.available.standard.shaderClipDistance;
758
deviceFeatures_.enabled.standard.shaderCullDistance = deviceFeatures_.available.standard.shaderCullDistance;
759
deviceFeatures_.enabled.standard.geometryShader = deviceFeatures_.available.standard.geometryShader;
760
deviceFeatures_.enabled.standard.sampleRateShading = deviceFeatures_.available.standard.sampleRateShading;
761
762
#ifdef _DEBUG
763
// For debugging! Although, it might hide problems, so turning it off. Can be useful to rule out classes of issues.
764
// deviceFeatures_.enabled.standard.robustBufferAccess = deviceFeatures_.available.standard.robustBufferAccess;
765
#endif
766
767
deviceFeatures_.enabled.multiview = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES };
768
if (extensionsLookup_.KHR_multiview) {
769
deviceFeatures_.enabled.multiview.multiview = deviceFeatures_.available.multiview.multiview;
770
}
771
// Strangely, on Intel, it reports these as available even though the extension isn't in the list.
772
deviceFeatures_.enabled.presentId = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR };
773
if (extensionsLookup_.KHR_present_id) {
774
deviceFeatures_.enabled.presentId.presentId = deviceFeatures_.available.presentId.presentId;
775
}
776
deviceFeatures_.enabled.presentWait = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR };
777
if (extensionsLookup_.KHR_present_wait) {
778
deviceFeatures_.enabled.presentWait.presentWait = deviceFeatures_.available.presentWait.presentWait;
779
}
780
deviceFeatures_.enabled.provokingVertex = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT };
781
if (extensionsLookup_.EXT_provoking_vertex) {
782
deviceFeatures_.enabled.provokingVertex.provokingVertexLast = true;
783
}
784
785
// deviceFeatures_.enabled.multiview.multiviewGeometryShader = deviceFeatures_.available.multiview.multiviewGeometryShader;
786
787
VkPhysicalDeviceFeatures2 features2{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 };
788
789
VkDeviceCreateInfo device_info{ VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO };
790
device_info.queueCreateInfoCount = 1;
791
device_info.pQueueCreateInfos = &queue_info;
792
device_info.enabledLayerCount = (uint32_t)device_layer_names_.size();
793
device_info.ppEnabledLayerNames = device_info.enabledLayerCount ? device_layer_names_.data() : nullptr;
794
device_info.enabledExtensionCount = (uint32_t)device_extensions_enabled_.size();
795
device_info.ppEnabledExtensionNames = device_info.enabledExtensionCount ? device_extensions_enabled_.data() : nullptr;
796
797
if (extensionsLookup_.KHR_get_physical_device_properties2) {
798
device_info.pNext = &features2;
799
features2.features = deviceFeatures_.enabled.standard;
800
ChainStruct(features2, &deviceFeatures_.enabled.multiview);
801
if (extensionsLookup_.KHR_present_wait) {
802
ChainStruct(features2, &deviceFeatures_.enabled.presentWait);
803
}
804
if (extensionsLookup_.KHR_present_id) {
805
ChainStruct(features2, &deviceFeatures_.enabled.presentId);
806
}
807
if (extensionsLookup_.EXT_provoking_vertex) {
808
ChainStruct(features2, &deviceFeatures_.enabled.provokingVertex);
809
}
810
} else {
811
device_info.pEnabledFeatures = &deviceFeatures_.enabled.standard;
812
}
813
814
VkResult res = vkCreateDevice(physical_devices_[physical_device_], &device_info, nullptr, &device_);
815
if (res != VK_SUCCESS) {
816
init_error_ = "Unable to create Vulkan device";
817
ERROR_LOG(Log::G3D, "%s", init_error_.c_str());
818
} else {
819
VulkanLoadDeviceFunctions(device_, extensionsLookup_, vulkanDeviceApiVersion_);
820
}
821
INFO_LOG(Log::G3D, "Vulkan Device created: %s", physicalDeviceProperties_[physical_device_].properties.deviceName);
822
823
// Since we successfully created a device (however we got here, might be interesting in debug), we force the choice to be visible in the menu.
824
VulkanSetAvailable(true);
825
826
VmaAllocatorCreateInfo allocatorInfo = {};
827
allocatorInfo.vulkanApiVersion = std::min(vulkanDeviceApiVersion_, vulkanInstanceApiVersion_);
828
allocatorInfo.physicalDevice = physical_devices_[physical_device_];
829
allocatorInfo.device = device_;
830
allocatorInfo.instance = instance_;
831
VkResult result = vmaCreateAllocator(&allocatorInfo, &allocator_);
832
_assert_(result == VK_SUCCESS);
833
_assert_(allocator_ != VK_NULL_HANDLE);
834
835
// Examine the physical device to figure out super rough performance grade.
836
// Basically all we want to do is to identify low performance mobile devices
837
// so we can make decisions on things like texture scaling strategy.
838
auto &props = physicalDeviceProperties_[physical_device_].properties;
839
switch (props.vendorID) {
840
case VULKAN_VENDOR_AMD:
841
case VULKAN_VENDOR_NVIDIA:
842
case VULKAN_VENDOR_INTEL:
843
devicePerfClass_ = PerfClass::FAST;
844
break;
845
846
case VULKAN_VENDOR_ARM:
847
devicePerfClass_ = PerfClass::SLOW;
848
{
849
// Parse the device name as an ultra rough heuristic.
850
int maliG = 0;
851
if (sscanf(props.deviceName, "Mali-G%d", &maliG) == 1) {
852
if (maliG >= 72) {
853
devicePerfClass_ = PerfClass::FAST;
854
}
855
}
856
}
857
break;
858
859
case VULKAN_VENDOR_QUALCOMM:
860
devicePerfClass_ = PerfClass::SLOW;
861
#if PPSSPP_PLATFORM(ANDROID)
862
if (System_GetPropertyInt(SYSPROP_SYSTEMVERSION) >= 30) {
863
devicePerfClass_ = PerfClass::FAST;
864
}
865
#endif
866
break;
867
868
case VULKAN_VENDOR_IMGTEC:
869
default:
870
devicePerfClass_ = PerfClass::SLOW;
871
break;
872
}
873
874
return res;
875
}
876
877
VkResult VulkanContext::InitDebugUtilsCallback() {
878
VkDebugUtilsMessengerCreateInfoEXT callback1{VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT};
879
// We're intentionally skipping VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT and
880
// VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, just too spammy.
881
callback1.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
882
callback1.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
883
callback1.pfnUserCallback = &VulkanDebugUtilsCallback;
884
callback1.pUserData = (void *)&g_LogOptions;
885
VkDebugUtilsMessengerEXT messenger;
886
VkResult res = vkCreateDebugUtilsMessengerEXT(instance_, &callback1, nullptr, &messenger);
887
if (res != VK_SUCCESS) {
888
ERROR_LOG(Log::G3D, "Failed to register debug callback with vkCreateDebugUtilsMessengerEXT");
889
// Do error handling for VK_ERROR_OUT_OF_MEMORY
890
} else {
891
INFO_LOG(Log::G3D, "Debug callback registered with vkCreateDebugUtilsMessengerEXT.");
892
utils_callbacks.push_back(messenger);
893
}
894
return res;
895
}
896
897
bool VulkanContext::CreateInstanceAndDevice(const CreateInfo &info) {
898
VkResult res = CreateInstance(info);
899
if (res != VK_SUCCESS) {
900
ERROR_LOG(Log::G3D, "Failed to create vulkan context: %s", InitError().c_str());
901
VulkanSetAvailable(false);
902
return false;
903
}
904
905
int physicalDevice = GetBestPhysicalDevice();
906
if (physicalDevice < 0) {
907
ERROR_LOG(Log::G3D, "No usable Vulkan device found.");
908
DestroyInstance();
909
return false;
910
}
911
912
INFO_LOG(Log::G3D, "Creating Vulkan device (flags: %08x)", info.flags);
913
if (CreateDevice(physicalDevice) != VK_SUCCESS) {
914
INFO_LOG(Log::G3D, "Failed to create vulkan device: %s", InitError().c_str());
915
DestroyInstance();
916
return false;
917
}
918
919
return true;
920
}
921
922
void VulkanContext::SetDebugNameImpl(uint64_t handle, VkObjectType type, const char *name) {
923
VkDebugUtilsObjectNameInfoEXT info{ VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT };
924
info.pObjectName = name;
925
info.objectHandle = handle;
926
info.objectType = type;
927
vkSetDebugUtilsObjectNameEXT(device_, &info);
928
}
929
930
VkResult VulkanContext::InitSurface(WindowSystem winsys, void *data1, void *data2) {
931
winsys_ = winsys;
932
winsysData1_ = data1;
933
winsysData2_ = data2;
934
return ReinitSurface();
935
}
936
937
VkResult VulkanContext::ReinitSurface() {
938
if (surface_ != VK_NULL_HANDLE) {
939
INFO_LOG(Log::G3D, "Destroying Vulkan surface (%d, %d)", swapChainExtent_.width, swapChainExtent_.height);
940
vkDestroySurfaceKHR(instance_, surface_, nullptr);
941
surface_ = VK_NULL_HANDLE;
942
}
943
944
INFO_LOG(Log::G3D, "Creating Vulkan surface for window (data1=%p data2=%p)", winsysData1_, winsysData2_);
945
946
VkResult retval = VK_SUCCESS;
947
948
switch (winsys_) {
949
#ifdef _WIN32
950
case WINDOWSYSTEM_WIN32:
951
{
952
VkWin32SurfaceCreateInfoKHR win32{ VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR };
953
win32.flags = 0;
954
win32.hwnd = (HWND)winsysData2_;
955
win32.hinstance = (HINSTANCE)winsysData1_;
956
retval = vkCreateWin32SurfaceKHR(instance_, &win32, nullptr, &surface_);
957
break;
958
}
959
#endif
960
#if defined(__ANDROID__)
961
case WINDOWSYSTEM_ANDROID:
962
{
963
ANativeWindow *wnd = (ANativeWindow *)winsysData1_;
964
VkAndroidSurfaceCreateInfoKHR android{ VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR };
965
android.flags = 0;
966
android.window = wnd;
967
retval = vkCreateAndroidSurfaceKHR(instance_, &android, nullptr, &surface_);
968
break;
969
}
970
#endif
971
#if defined(VK_USE_PLATFORM_METAL_EXT)
972
case WINDOWSYSTEM_METAL_EXT:
973
{
974
VkMetalSurfaceCreateInfoEXT metal{ VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT };
975
metal.flags = 0;
976
metal.pLayer = winsysData1_;
977
metal.pNext = winsysData2_;
978
retval = vkCreateMetalSurfaceEXT(instance_, &metal, nullptr, &surface_);
979
break;
980
}
981
#endif
982
#if defined(VK_USE_PLATFORM_XLIB_KHR)
983
case WINDOWSYSTEM_XLIB:
984
{
985
VkXlibSurfaceCreateInfoKHR xlib{ VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR };
986
xlib.flags = 0;
987
xlib.dpy = (Display *)winsysData1_;
988
xlib.window = (Window)winsysData2_;
989
retval = vkCreateXlibSurfaceKHR(instance_, &xlib, nullptr, &surface_);
990
break;
991
}
992
#endif
993
#if defined(VK_USE_PLATFORM_XCB_KHR)
994
case WINDOWSYSTEM_XCB:
995
{
996
VkXCBSurfaceCreateInfoKHR xcb{ VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR };
997
xcb.flags = 0;
998
xcb.connection = (Connection *)winsysData1_;
999
xcb.window = (Window)(uintptr_t)winsysData2_;
1000
retval = vkCreateXcbSurfaceKHR(instance_, &xcb, nullptr, &surface_);
1001
break;
1002
}
1003
#endif
1004
#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
1005
case WINDOWSYSTEM_WAYLAND:
1006
{
1007
VkWaylandSurfaceCreateInfoKHR wayland{ VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR };
1008
wayland.flags = 0;
1009
wayland.display = (wl_display *)winsysData1_;
1010
wayland.surface = (wl_surface *)winsysData2_;
1011
retval = vkCreateWaylandSurfaceKHR(instance_, &wayland, nullptr, &surface_);
1012
break;
1013
}
1014
#endif
1015
#if defined(VK_USE_PLATFORM_DISPLAY_KHR)
1016
case WINDOWSYSTEM_DISPLAY:
1017
{
1018
VkDisplaySurfaceCreateInfoKHR display{ VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR };
1019
#if !defined(__LIBRETRO__)
1020
/*
1021
And when not to use libretro need VkDisplaySurfaceCreateInfoKHR this extension,
1022
then you need to use dlopen to read vulkan loader in VulkanLoader.cpp.
1023
huangzihan China
1024
*/
1025
1026
if(!vkGetPhysicalDeviceDisplayPropertiesKHR ||
1027
!vkGetPhysicalDeviceDisplayPlanePropertiesKHR ||
1028
!vkGetDisplayModePropertiesKHR ||
1029
!vkGetDisplayPlaneSupportedDisplaysKHR ||
1030
!vkGetDisplayPlaneCapabilitiesKHR ) {
1031
_assert_msg_(false, "DISPLAY Vulkan cannot find any vulkan function symbols.");
1032
return VK_ERROR_INITIALIZATION_FAILED;
1033
}
1034
1035
//The following code is for reference:
1036
// https://github.com/vanfanel/ppsspp
1037
// When using the VK_KHR_display extension and not using LIBRETRO, a complete
1038
// VkDisplaySurfaceCreateInfoKHR is needed.
1039
1040
uint32_t display_count;
1041
uint32_t plane_count;
1042
1043
VkDisplayPropertiesKHR *display_props = NULL;
1044
VkDisplayPlanePropertiesKHR *plane_props = NULL;
1045
VkDisplayModePropertiesKHR* mode_props = NULL;
1046
1047
VkExtent2D image_size;
1048
// This is the chosen physical_device, it has been chosen elsewhere.
1049
VkPhysicalDevice phys_device = physical_devices_[physical_device_];
1050
VkDisplayModeKHR display_mode = VK_NULL_HANDLE;
1051
VkDisplayPlaneAlphaFlagBitsKHR alpha_mode = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR;
1052
uint32_t plane = UINT32_MAX;
1053
1054
// For now, use the first available (connected) display.
1055
int display_index = 0;
1056
1057
VkResult result;
1058
bool ret = false;
1059
bool mode_found = false;
1060
1061
int i, j;
1062
1063
// 1 physical device can have N displays connected.
1064
// Vulkan only counts the connected displays.
1065
1066
// Get a list of displays on the physical device.
1067
display_count = 0;
1068
vkGetPhysicalDeviceDisplayPropertiesKHR(phys_device, &display_count, NULL);
1069
if (display_count == 0) {
1070
_assert_msg_(false, "DISPLAY Vulkan couldn't find any displays.");
1071
return VK_ERROR_INITIALIZATION_FAILED;
1072
}
1073
display_props = new VkDisplayPropertiesKHR[display_count];
1074
vkGetPhysicalDeviceDisplayPropertiesKHR(phys_device, &display_count, display_props);
1075
1076
// Get a list of display planes on the physical device.
1077
plane_count = 0;
1078
vkGetPhysicalDeviceDisplayPlanePropertiesKHR(phys_device, &plane_count, NULL);
1079
if (plane_count == 0) {
1080
_assert_msg_(false, "DISPLAY Vulkan couldn't find any planes on the physical device");
1081
return VK_ERROR_INITIALIZATION_FAILED;
1082
1083
}
1084
plane_props = new VkDisplayPlanePropertiesKHR[plane_count];
1085
vkGetPhysicalDeviceDisplayPlanePropertiesKHR(phys_device, &plane_count, plane_props);
1086
1087
// Get the Vulkan display we are going to use.
1088
VkDisplayKHR myDisplay = display_props[display_index].display;
1089
1090
// Get the list of display modes of the display
1091
uint32_t mode_count = 0;
1092
vkGetDisplayModePropertiesKHR(phys_device, myDisplay, &mode_count, NULL);
1093
if (mode_count == 0) {
1094
_assert_msg_(false, "DISPLAY Vulkan couldn't find any video modes on the display");
1095
return VK_ERROR_INITIALIZATION_FAILED;
1096
}
1097
mode_props = new VkDisplayModePropertiesKHR[mode_count];
1098
vkGetDisplayModePropertiesKHR(phys_device, myDisplay, &mode_count, mode_props);
1099
1100
// See if there's an appropiate mode available on the display
1101
display_mode = VK_NULL_HANDLE;
1102
for (i = 0; i < mode_count; ++i)
1103
{
1104
const VkDisplayModePropertiesKHR* mode = &mode_props[i];
1105
1106
if (mode->parameters.visibleRegion.width == g_display.pixel_xres &&
1107
mode->parameters.visibleRegion.height == g_display.pixel_yres)
1108
{
1109
display_mode = mode->displayMode;
1110
mode_found = true;
1111
break;
1112
}
1113
}
1114
1115
// Free the mode list now.
1116
delete [] mode_props;
1117
1118
// If there are no useable modes found on the display, error out
1119
if (display_mode == VK_NULL_HANDLE)
1120
{
1121
_assert_msg_(false, "DISPLAY Vulkan couldn't find any video modes on the display");
1122
return VK_ERROR_INITIALIZATION_FAILED;
1123
}
1124
1125
/* Iterate on the list of planes of the physical device
1126
to find a plane that matches these criteria:
1127
-It must be compatible with the chosen display + mode.
1128
-It isn't currently bound to another display.
1129
-It supports per-pixel alpha, if possible. */
1130
for (i = 0; i < plane_count; i++) {
1131
uint32_t supported_displays_count = 0;
1132
VkDisplayKHR* supported_displays;
1133
VkDisplayPlaneCapabilitiesKHR plane_caps;
1134
1135
/* See if the plane is compatible with the current display. */
1136
vkGetDisplayPlaneSupportedDisplaysKHR(phys_device, i, &supported_displays_count, NULL);
1137
if (supported_displays_count == 0) {
1138
/* This plane doesn't support any displays. Continue to the next plane. */
1139
continue;
1140
}
1141
1142
/* Get the list of displays supported by this plane. */
1143
supported_displays = new VkDisplayKHR[supported_displays_count];
1144
vkGetDisplayPlaneSupportedDisplaysKHR(phys_device, i,
1145
&supported_displays_count, supported_displays);
1146
1147
/* The plane must be bound to the chosen display, or not in use.
1148
If none of these is true, iterate to another plane. */
1149
if ( !( (plane_props[i].currentDisplay == myDisplay) ||
1150
(plane_props[i].currentDisplay == VK_NULL_HANDLE)))
1151
continue;
1152
1153
/* Iterate the list of displays supported by this plane
1154
in order to find out if the chosen display is among them. */
1155
bool plane_supports_display = false;
1156
for (j = 0; j < supported_displays_count; j++) {
1157
if (supported_displays[j] == myDisplay) {
1158
plane_supports_display = true;
1159
break;
1160
}
1161
}
1162
1163
/* Free the list of displays supported by this plane. */
1164
delete [] supported_displays;
1165
1166
/* If the display is not supported by this plane, iterate to the next plane. */
1167
if (!plane_supports_display)
1168
continue;
1169
1170
/* Want a plane that supports the alpha mode we have chosen. */
1171
vkGetDisplayPlaneCapabilitiesKHR(phys_device, display_mode, i, &plane_caps);
1172
if (plane_caps.supportedAlpha & alpha_mode) {
1173
/* Yep, this plane is alright. */
1174
plane = i;
1175
break;
1176
}
1177
}
1178
1179
/* If we couldn't find an appropiate plane, error out. */
1180
if (plane == UINT32_MAX) {
1181
_assert_msg_(false, "DISPLAY Vulkan couldn't find an appropiate plane");
1182
return VK_ERROR_INITIALIZATION_FAILED;
1183
}
1184
1185
// Finally, create the vulkan surface.
1186
image_size.width = g_display.pixel_xres;
1187
image_size.height = g_display.pixel_yres;
1188
1189
display.displayMode = display_mode;
1190
display.imageExtent = image_size;
1191
display.transform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
1192
display.alphaMode = alpha_mode;
1193
display.globalAlpha = 1.0f;
1194
display.planeIndex = plane;
1195
display.planeStackIndex = plane_props[plane].currentStackIndex;
1196
display.pNext = nullptr;
1197
delete [] display_props;
1198
delete [] plane_props;
1199
#endif
1200
display.flags = 0;
1201
retval = vkCreateDisplayPlaneSurfaceKHR(instance_, &display, nullptr, &surface_);
1202
break;
1203
}
1204
#endif
1205
1206
default:
1207
_assert_msg_(false, "Vulkan support for chosen window system not implemented");
1208
return VK_ERROR_INITIALIZATION_FAILED;
1209
}
1210
1211
if (retval != VK_SUCCESS) {
1212
return retval;
1213
}
1214
1215
if (!ChooseQueue()) {
1216
return VK_ERROR_INITIALIZATION_FAILED;
1217
}
1218
1219
for (int i = 0; i < ARRAY_SIZE(frame_); i++) {
1220
frame_[i].profiler.Init(this);
1221
}
1222
1223
return VK_SUCCESS;
1224
}
1225
1226
bool VulkanContext::ChooseQueue() {
1227
// Iterate over each queue to learn whether it supports presenting:
1228
VkBool32 *supportsPresent = new VkBool32[queue_count];
1229
for (uint32_t i = 0; i < queue_count; i++) {
1230
vkGetPhysicalDeviceSurfaceSupportKHR(physical_devices_[physical_device_], i, surface_, &supportsPresent[i]);
1231
}
1232
1233
// Search for a graphics queue and a present queue in the array of queue
1234
// families, try to find one that supports both
1235
uint32_t graphicsQueueNodeIndex = UINT32_MAX;
1236
uint32_t presentQueueNodeIndex = UINT32_MAX;
1237
for (uint32_t i = 0; i < queue_count; i++) {
1238
if ((queueFamilyProperties_[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
1239
if (graphicsQueueNodeIndex == UINT32_MAX) {
1240
graphicsQueueNodeIndex = i;
1241
}
1242
1243
if (supportsPresent[i] == VK_TRUE) {
1244
graphicsQueueNodeIndex = i;
1245
presentQueueNodeIndex = i;
1246
break;
1247
}
1248
}
1249
}
1250
if (presentQueueNodeIndex == UINT32_MAX) {
1251
// If didn't find a queue that supports both graphics and present, then
1252
// find a separate present queue. NOTE: We don't actually currently support this arrangement!
1253
for (uint32_t i = 0; i < queue_count; ++i) {
1254
if (supportsPresent[i] == VK_TRUE) {
1255
presentQueueNodeIndex = i;
1256
break;
1257
}
1258
}
1259
}
1260
delete[] supportsPresent;
1261
1262
// Generate error if could not find both a graphics and a present queue
1263
if (graphicsQueueNodeIndex == UINT32_MAX || presentQueueNodeIndex == UINT32_MAX) {
1264
ERROR_LOG(Log::G3D, "Could not find a graphics and a present queue");
1265
return false;
1266
}
1267
1268
graphics_queue_family_index_ = graphicsQueueNodeIndex;
1269
1270
// Get the list of VkFormats that are supported:
1271
uint32_t formatCount = 0;
1272
VkResult res = vkGetPhysicalDeviceSurfaceFormatsKHR(physical_devices_[physical_device_], surface_, &formatCount, nullptr);
1273
_assert_msg_(res == VK_SUCCESS, "Failed to get formats for device %d: %d", physical_device_, (int)res);
1274
if (res != VK_SUCCESS) {
1275
return false;
1276
}
1277
1278
surfFormats_.resize(formatCount);
1279
res = vkGetPhysicalDeviceSurfaceFormatsKHR(physical_devices_[physical_device_], surface_, &formatCount, surfFormats_.data());
1280
_dbg_assert_(res == VK_SUCCESS);
1281
if (res != VK_SUCCESS) {
1282
return false;
1283
}
1284
// If the format list includes just one entry of VK_FORMAT_UNDEFINED,
1285
// the surface has no preferred format. Otherwise, at least one
1286
// supported format will be returned.
1287
if (formatCount == 0 || (formatCount == 1 && surfFormats_[0].format == VK_FORMAT_UNDEFINED)) {
1288
INFO_LOG(Log::G3D, "swapchain_format: Falling back to B8G8R8A8_UNORM");
1289
swapchainFormat_ = VK_FORMAT_B8G8R8A8_UNORM;
1290
} else {
1291
swapchainFormat_ = VK_FORMAT_UNDEFINED;
1292
for (uint32_t i = 0; i < formatCount; ++i) {
1293
if (surfFormats_[i].colorSpace != VK_COLORSPACE_SRGB_NONLINEAR_KHR) {
1294
continue;
1295
}
1296
if (surfFormats_[i].format == VK_FORMAT_B8G8R8A8_UNORM || surfFormats_[i].format == VK_FORMAT_R8G8B8A8_UNORM) {
1297
swapchainFormat_ = surfFormats_[i].format;
1298
break;
1299
}
1300
}
1301
if (swapchainFormat_ == VK_FORMAT_UNDEFINED) {
1302
// Okay, take the first one then.
1303
swapchainFormat_ = surfFormats_[0].format;
1304
}
1305
INFO_LOG(Log::G3D, "swapchain_format: %s (%d) (/%d)", VulkanFormatToString(swapchainFormat_), (int)swapchainFormat_, formatCount);
1306
}
1307
1308
vkGetDeviceQueue(device_, graphics_queue_family_index_, 0, &gfx_queue_);
1309
return true;
1310
}
1311
1312
int clamp(int x, int a, int b) {
1313
if (x < a)
1314
return a;
1315
if (x > b)
1316
return b;
1317
return x;
1318
}
1319
1320
static std::string surface_transforms_to_string(VkSurfaceTransformFlagsKHR transformFlags) {
1321
std::string str;
1322
if (transformFlags & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) str += "IDENTITY ";
1323
if (transformFlags & VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR) str += "ROTATE_90 ";
1324
if (transformFlags & VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR) str += "ROTATE_180 ";
1325
if (transformFlags & VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR) str += "ROTATE_270 ";
1326
if (transformFlags & VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR) str += "HMIRROR ";
1327
if (transformFlags & VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR) str += "HMIRROR_90 ";
1328
if (transformFlags & VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR) str += "HMIRROR_180 ";
1329
if (transformFlags & VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR) str += "HMIRROR_270 ";
1330
if (transformFlags & VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR) str += "INHERIT ";
1331
return str;
1332
}
1333
1334
bool VulkanContext::InitSwapchain() {
1335
_assert_(physical_device_ >= 0 && physical_device_ < (int)physical_devices_.size());
1336
if (!surface_) {
1337
ERROR_LOG(Log::G3D, "VK: No surface, can't create swapchain");
1338
return false;
1339
}
1340
1341
VkResult res = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_devices_[physical_device_], surface_, &surfCapabilities_);
1342
if (res == VK_ERROR_SURFACE_LOST_KHR) {
1343
// Not much to do.
1344
ERROR_LOG(Log::G3D, "VK: Surface lost in InitSwapchain");
1345
return false;
1346
}
1347
1348
if (surfCapabilities_.maxImageExtent.width == 0 || surfCapabilities_.maxImageExtent.height == 0) {
1349
WARN_LOG(Log::G3D, "Max image extent is 0 - app is probably minimized. Faking having a swapchain.");
1350
swapChainExtent_ = {}; // makes it so querying width/height returns 0.
1351
// We pretend to have a swapchain initialized - though we won't actually render to it.
1352
swapchainInited_ = true;
1353
return true;
1354
}
1355
1356
_dbg_assert_(res == VK_SUCCESS);
1357
uint32_t presentModeCount;
1358
res = vkGetPhysicalDeviceSurfacePresentModesKHR(physical_devices_[physical_device_], surface_, &presentModeCount, nullptr);
1359
_dbg_assert_(res == VK_SUCCESS);
1360
VkPresentModeKHR *presentModes = new VkPresentModeKHR[presentModeCount];
1361
_dbg_assert_(presentModes);
1362
res = vkGetPhysicalDeviceSurfacePresentModesKHR(physical_devices_[physical_device_], surface_, &presentModeCount, presentModes);
1363
_dbg_assert_(res == VK_SUCCESS);
1364
1365
VkExtent2D currentExtent{ surfCapabilities_.currentExtent };
1366
// https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VkSurfaceCapabilitiesKHR.html
1367
// currentExtent is the current width and height of the surface, or the special value (0xFFFFFFFF, 0xFFFFFFFF) indicating that the surface size will be determined by the extent of a swapchain targeting the surface.
1368
if (currentExtent.width == 0xFFFFFFFFu || currentExtent.height == 0xFFFFFFFFu
1369
#if PPSSPP_PLATFORM(IOS)
1370
|| currentExtent.width == 0 || currentExtent.height == 0
1371
#endif
1372
) {
1373
_dbg_assert_((bool)cbGetDrawSize_);
1374
if (cbGetDrawSize_) {
1375
currentExtent = cbGetDrawSize_();
1376
}
1377
}
1378
1379
swapChainExtent_.width = clamp(currentExtent.width, surfCapabilities_.minImageExtent.width, surfCapabilities_.maxImageExtent.width);
1380
swapChainExtent_.height = clamp(currentExtent.height, surfCapabilities_.minImageExtent.height, surfCapabilities_.maxImageExtent.height);
1381
1382
INFO_LOG(Log::G3D, "surfCapabilities_.current: %dx%d min: %dx%d max: %dx%d computed: %dx%d",
1383
currentExtent.width, currentExtent.height,
1384
surfCapabilities_.minImageExtent.width, surfCapabilities_.minImageExtent.height,
1385
surfCapabilities_.maxImageExtent.width, surfCapabilities_.maxImageExtent.height,
1386
swapChainExtent_.width, swapChainExtent_.height);
1387
1388
availablePresentModes_.clear();
1389
// TODO: Find a better way to specify the prioritized present mode while being able
1390
// to fall back in a sensible way.
1391
VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_MAX_ENUM_KHR;
1392
std::string modes = "";
1393
for (size_t i = 0; i < presentModeCount; i++) {
1394
modes += VulkanPresentModeToString(presentModes[i]);
1395
if (i != presentModeCount - 1) {
1396
modes += ", ";
1397
}
1398
availablePresentModes_.push_back(presentModes[i]);
1399
}
1400
1401
for (size_t i = 0; i < presentModeCount; i++) {
1402
bool match = false;
1403
match = match || ((flags_ & VulkanInitFlags::PRESENT_MAILBOX) && presentModes[i] == VK_PRESENT_MODE_MAILBOX_KHR);
1404
match = match || ((flags_ & VulkanInitFlags::PRESENT_IMMEDIATE) && presentModes[i] == VK_PRESENT_MODE_IMMEDIATE_KHR);
1405
match = match || ((flags_ & VulkanInitFlags::PRESENT_FIFO_RELAXED) && presentModes[i] == VK_PRESENT_MODE_FIFO_RELAXED_KHR);
1406
match = match || ((flags_ & VulkanInitFlags::PRESENT_FIFO) && presentModes[i] == VK_PRESENT_MODE_FIFO_KHR);
1407
1408
// Default to the first present mode from the list.
1409
if (match || swapchainPresentMode == VK_PRESENT_MODE_MAX_ENUM_KHR) {
1410
swapchainPresentMode = presentModes[i];
1411
}
1412
if (match) {
1413
break;
1414
}
1415
}
1416
delete[] presentModes;
1417
// Determine the number of VkImage's to use in the swap chain (we desire to
1418
// own only 1 image at a time, besides the images being displayed and
1419
// queued for display):
1420
uint32_t desiredNumberOfSwapChainImages = surfCapabilities_.minImageCount + 1;
1421
if ((surfCapabilities_.maxImageCount > 0) &&
1422
(desiredNumberOfSwapChainImages > surfCapabilities_.maxImageCount)) {
1423
// Application must settle for fewer images than desired:
1424
desiredNumberOfSwapChainImages = surfCapabilities_.maxImageCount;
1425
}
1426
1427
INFO_LOG(Log::G3D, "Supported present modes: %s. Chosen present mode: %d (%s). numSwapChainImages: %d (max: %d)",
1428
modes.c_str(), swapchainPresentMode, VulkanPresentModeToString(swapchainPresentMode),
1429
desiredNumberOfSwapChainImages, surfCapabilities_.maxImageCount);
1430
1431
// We mostly follow the practices from
1432
// https://arm-software.github.io/vulkan_best_practice_for_mobile_developers/samples/surface_rotation/surface_rotation_tutorial.html
1433
//
1434
VkSurfaceTransformFlagBitsKHR preTransform;
1435
std::string supportedTransforms = surface_transforms_to_string(surfCapabilities_.supportedTransforms);
1436
std::string currentTransform = surface_transforms_to_string(surfCapabilities_.currentTransform);
1437
g_display.rotation = DisplayRotation::ROTATE_0;
1438
g_display.rot_matrix.setIdentity();
1439
1440
uint32_t allowedRotations = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR | VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR | VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR;
1441
// Hack: Don't allow 270 degrees pretransform (inverse landscape), it creates bizarre issues on some devices (see #15773).
1442
allowedRotations &= ~VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR;
1443
1444
if (surfCapabilities_.currentTransform & (VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR | VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR)) {
1445
preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
1446
} else if (surfCapabilities_.currentTransform & allowedRotations) {
1447
// Normal, sensible rotations. Let's handle it.
1448
preTransform = surfCapabilities_.currentTransform;
1449
g_display.rot_matrix.setIdentity();
1450
switch (surfCapabilities_.currentTransform) {
1451
case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR:
1452
g_display.rotation = DisplayRotation::ROTATE_90;
1453
g_display.rot_matrix.setRotationZ90();
1454
std::swap(swapChainExtent_.width, swapChainExtent_.height);
1455
break;
1456
case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR:
1457
g_display.rotation = DisplayRotation::ROTATE_180;
1458
g_display.rot_matrix.setRotationZ180();
1459
break;
1460
case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR:
1461
g_display.rotation = DisplayRotation::ROTATE_270;
1462
g_display.rot_matrix.setRotationZ270();
1463
std::swap(swapChainExtent_.width, swapChainExtent_.height);
1464
break;
1465
default:
1466
_dbg_assert_(false);
1467
}
1468
} else {
1469
// Let the OS rotate the image (potentially slower on many Android devices)
1470
preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
1471
}
1472
1473
// Only log transforms if relevant.
1474
if (surfCapabilities_.supportedTransforms != VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
1475
std::string preTransformStr = surface_transforms_to_string(preTransform);
1476
INFO_LOG(Log::G3D, "Transform supported: %s current: %s chosen: %s", supportedTransforms.c_str(), currentTransform.c_str(), preTransformStr.c_str());
1477
}
1478
1479
if (physicalDeviceProperties_[physical_device_].properties.vendorID == VULKAN_VENDOR_IMGTEC) {
1480
u32 driverVersion = physicalDeviceProperties_[physical_device_].properties.driverVersion;
1481
// Cutoff the hack at driver version 1.386.1368 (0x00582558, see issue #15773).
1482
if (driverVersion < 0x00582558) {
1483
INFO_LOG(Log::G3D, "Applying PowerVR hack (rounding off the width!) driverVersion=%08x", driverVersion);
1484
// Swap chain width hack to avoid issue #11743 (PowerVR driver bug).
1485
// To keep the size consistent even with pretransform, do this after the swap. Should be fine.
1486
// This is fixed in newer PowerVR drivers but I don't know the cutoff.
1487
swapChainExtent_.width &= ~31;
1488
1489
// TODO: Also modify display_xres/display_yres appropriately for scissors to match.
1490
// This will get a bit messy. Ideally we should remove that logic from app-android.cpp
1491
// and move it here, but the OpenGL code still needs it.
1492
} else {
1493
INFO_LOG(Log::G3D, "PowerVR driver version new enough (%08x), not applying swapchain width hack", driverVersion);
1494
}
1495
}
1496
1497
VkSwapchainCreateInfoKHR swap_chain_info{ VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR };
1498
swap_chain_info.surface = surface_;
1499
swap_chain_info.minImageCount = desiredNumberOfSwapChainImages;
1500
swap_chain_info.imageFormat = swapchainFormat_;
1501
swap_chain_info.imageColorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
1502
swap_chain_info.imageExtent.width = swapChainExtent_.width;
1503
swap_chain_info.imageExtent.height = swapChainExtent_.height;
1504
swap_chain_info.preTransform = preTransform;
1505
swap_chain_info.imageArrayLayers = 1;
1506
swap_chain_info.presentMode = swapchainPresentMode;
1507
swap_chain_info.oldSwapchain = VK_NULL_HANDLE;
1508
swap_chain_info.clipped = true;
1509
swap_chain_info.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
1510
1511
presentMode_ = swapchainPresentMode;
1512
1513
// We don't support screenshots on Android if TRANSFER_SRC usage flag is not supported.
1514
if (surfCapabilities_.supportedUsageFlags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) {
1515
swap_chain_info.imageUsage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1516
}
1517
1518
swap_chain_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
1519
swap_chain_info.queueFamilyIndexCount = 0;
1520
swap_chain_info.pQueueFamilyIndices = NULL;
1521
// OPAQUE is not supported everywhere.
1522
if (surfCapabilities_.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR) {
1523
swap_chain_info.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
1524
} else {
1525
// This should be supported anywhere, and is the only thing supported on the SHIELD TV, for example.
1526
swap_chain_info.compositeAlpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
1527
}
1528
1529
res = vkCreateSwapchainKHR(device_, &swap_chain_info, NULL, &swapchain_);
1530
if (res != VK_SUCCESS) {
1531
ERROR_LOG(Log::G3D, "vkCreateSwapchainKHR failed!");
1532
return false;
1533
}
1534
INFO_LOG(Log::G3D, "Created swapchain: %dx%d %s", swap_chain_info.imageExtent.width, swap_chain_info.imageExtent.height, (surfCapabilities_.supportedUsageFlags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) ? "(TRANSFER_SRC_BIT supported)" : "");
1535
swapchainInited_ = true;
1536
return true;
1537
}
1538
1539
void VulkanContext::SetCbGetDrawSize(std::function<VkExtent2D()> cb) {
1540
cbGetDrawSize_ = cb;
1541
}
1542
1543
// Creates a fence on the context's device, optionally starting out in the
// signalled state. The caller owns the returned handle.
VkFence VulkanContext::CreateFence(bool presignalled) {
	VkFenceCreateInfo info{ VK_STRUCTURE_TYPE_FENCE_CREATE_INFO };
	if (presignalled) {
		info.flags = VK_FENCE_CREATE_SIGNALED_BIT;
	}
	VkFence fence;
	vkCreateFence(device_, &info, nullptr, &fence);
	return fence;
}
1550
1551
void VulkanContext::PerformPendingDeletes() {
1552
for (int i = 0; i < ARRAY_SIZE(frame_); i++) {
1553
frame_[i].deleteList.PerformDeletes(this, allocator_);
1554
}
1555
Delete().PerformDeletes(this, allocator_);
1556
}
1557
1558
void VulkanContext::DestroyDevice() {
1559
if (swapchain_) {
1560
ERROR_LOG(Log::G3D, "DestroyDevice: Swapchain should have been destroyed.");
1561
}
1562
if (surface_) {
1563
ERROR_LOG(Log::G3D, "DestroyDevice: Surface should have been destroyed.");
1564
}
1565
1566
for (int i = 0; i < ARRAY_SIZE(frame_); i++) {
1567
frame_[i].profiler.Shutdown();
1568
}
1569
1570
INFO_LOG(Log::G3D, "VulkanContext::DestroyDevice (performing deletes)");
1571
PerformPendingDeletes();
1572
1573
vmaDestroyAllocator(allocator_);
1574
allocator_ = VK_NULL_HANDLE;
1575
1576
vkDestroyDevice(device_, nullptr);
1577
device_ = nullptr;
1578
}
1579
1580
bool VulkanContext::CreateShaderModule(const std::vector<uint32_t> &spirv, VkShaderModule *shaderModule, const char *tag) {
1581
VkShaderModuleCreateInfo sm{ VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO };
1582
sm.pCode = spirv.data();
1583
sm.codeSize = spirv.size() * sizeof(uint32_t);
1584
sm.flags = 0;
1585
VkResult result = vkCreateShaderModule(device_, &sm, nullptr, shaderModule);
1586
if (tag) {
1587
SetDebugName(*shaderModule, VK_OBJECT_TYPE_SHADER_MODULE, tag);
1588
}
1589
if (result != VK_SUCCESS) {
1590
return false;
1591
} else {
1592
return true;
1593
}
1594
}
1595
1596
// Maps a Vulkan shader stage bit to the corresponding glslang stage enum.
// Unrecognized stages fall back to the vertex stage.
EShLanguage FindLanguage(const VkShaderStageFlagBits shader_type) {
	switch (shader_type) {
	case VK_SHADER_STAGE_VERTEX_BIT:                  return EShLangVertex;
	case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:    return EShLangTessControl;
	case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT: return EShLangTessEvaluation;
	case VK_SHADER_STAGE_GEOMETRY_BIT:                return EShLangGeometry;
	case VK_SHADER_STAGE_FRAGMENT_BIT:                return EShLangFragment;
	case VK_SHADER_STAGE_COMPUTE_BIT:                 return EShLangCompute;
	default:                                          return EShLangVertex;
	}
}
1620
1621
// Compile a given string containing GLSL into SPV for use by VK
// Return value of false means an error was encountered.
// On failure, the combined glslang info/debug logs are printed to stdout and,
// if errorMessage is non-null, also appended there. On success, the SPIR-V
// words are written to `spirv`.
bool GLSLtoSPV(const VkShaderStageFlagBits shader_type, const char *sourceCode, GLSLVariant variant,
	std::vector<unsigned int> &spirv, std::string *errorMessage) {

	glslang::TProgram program;
	const char *shaderStrings[1];
	TBuiltInResource Resources{};
	InitShaderResources(Resources);

	// Pick parse rules, default GLSL version and profile for the target variant.
	int defaultVersion = 0;
	EShMessages messages;
	EProfile profile;

	switch (variant) {
	case GLSLVariant::VULKAN:
		// Enable SPIR-V and Vulkan rules when parsing GLSL
		messages = (EShMessages)(EShMsgSpvRules | EShMsgVulkanRules);
		defaultVersion = 450;
		profile = ECoreProfile;
		break;
	case GLSLVariant::GL140:
		messages = (EShMessages)(EShMsgDefault);
		defaultVersion = 140;
		profile = ECompatibilityProfile;
		break;
	case GLSLVariant::GLES300:
		messages = (EShMessages)(EShMsgDefault);
		defaultVersion = 300;
		profile = EEsProfile;
		break;
	default:
		// Unknown variant - nothing sensible to compile against.
		return false;
	}

	EShLanguage stage = FindLanguage(shader_type);
	glslang::TShader shader(stage);

	// glslang keeps pointers to the strings; sourceCode must outlive parse().
	shaderStrings[0] = sourceCode;
	shader.setStrings(shaderStrings, 1);

	// Parse the GLSL. On failure, dump the logs and report them to the caller.
	if (!shader.parse(&Resources, defaultVersion, profile, false, true, messages)) {
		puts(shader.getInfoLog());
		puts(shader.getInfoDebugLog());
		if (errorMessage) {
			*errorMessage = shader.getInfoLog();
			(*errorMessage) += shader.getInfoDebugLog();
		}
		return false; // something didn't work
	}

	// TODO: Propagate warnings into errorMessages even if we succeeded here.

	// Note that program does not take ownership of &shader, so this is fine.
	program.addShader(&shader);

	// "Link" the single-shader program; same error reporting as for parse().
	if (!program.link(messages)) {
		puts(shader.getInfoLog());
		puts(shader.getInfoDebugLog());
		if (errorMessage) {
			*errorMessage = shader.getInfoLog();
			(*errorMessage) += shader.getInfoDebugLog();
		}
		return false;
	}

	// Can't fail, parsing worked, "linking" worked.
	glslang::SpvOptions options;
	options.disableOptimizer = false;
	options.optimizeSize = false;
	options.generateDebugInfo = false;
	glslang::GlslangToSpv(*program.getIntermediate(stage), spirv, &options);
	return true;
}
1695
1696
// Global glslang process setup. Must be called once before any GLSLtoSPV() use.
void init_glslang() {
	glslang::InitializeProcess();
}
1699
1700
// Global glslang process teardown; pairs with init_glslang().
void finalize_glslang() {
	glslang::FinalizeProcess();
}
1703
1704
// Takes over all pending deletions from another delete list. This list must be
// empty (debug-asserted) - taking into a non-empty list would silently drop
// our own pending deletes.
void VulkanDeleteList::Take(VulkanDeleteList &del) {
	_dbg_assert_(cmdPools_.empty());
	_dbg_assert_(descPools_.empty());
	_dbg_assert_(modules_.empty());
	_dbg_assert_(buffers_.empty());
	_dbg_assert_(bufferViews_.empty());
	_dbg_assert_(buffersWithAllocs_.empty());
	_dbg_assert_(imageViews_.empty());
	_dbg_assert_(imagesWithAllocs_.empty());
	_dbg_assert_(deviceMemory_.empty());
	_dbg_assert_(samplers_.empty());
	_dbg_assert_(pipelines_.empty());
	_dbg_assert_(pipelineCaches_.empty());
	_dbg_assert_(renderPasses_.empty());
	_dbg_assert_(framebuffers_.empty());
	_dbg_assert_(pipelineLayouts_.empty());
	_dbg_assert_(descSetLayouts_.empty());
	// queryPools_ was previously not asserted/taken here even though
	// PerformDeletes destroys it - queued query pools were left behind on del.
	_dbg_assert_(queryPools_.empty());
	_dbg_assert_(callbacks_.empty());
	cmdPools_ = std::move(del.cmdPools_);
	descPools_ = std::move(del.descPools_);
	modules_ = std::move(del.modules_);
	buffers_ = std::move(del.buffers_);
	buffersWithAllocs_ = std::move(del.buffersWithAllocs_);
	bufferViews_ = std::move(del.bufferViews_);
	imageViews_ = std::move(del.imageViews_);
	imagesWithAllocs_ = std::move(del.imagesWithAllocs_);
	deviceMemory_ = std::move(del.deviceMemory_);
	samplers_ = std::move(del.samplers_);
	pipelines_ = std::move(del.pipelines_);
	pipelineCaches_ = std::move(del.pipelineCaches_);
	renderPasses_ = std::move(del.renderPasses_);
	framebuffers_ = std::move(del.framebuffers_);
	pipelineLayouts_ = std::move(del.pipelineLayouts_);
	descSetLayouts_ = std::move(del.descSetLayouts_);
	queryPools_ = std::move(del.queryPools_);
	callbacks_ = std::move(del.callbacks_);
	// Explicitly clear the source lists - a moved-from vector is only
	// "valid but unspecified", so don't rely on it being empty.
	del.cmdPools_.clear();
	del.descPools_.clear();
	del.modules_.clear();
	del.buffers_.clear();
	del.buffersWithAllocs_.clear();
	del.bufferViews_.clear();  // was missing from the clear section
	del.imageViews_.clear();
	del.imagesWithAllocs_.clear();
	del.deviceMemory_.clear();
	del.samplers_.clear();
	del.pipelines_.clear();
	del.pipelineCaches_.clear();
	del.renderPasses_.clear();
	del.framebuffers_.clear();
	del.pipelineLayouts_.clear();
	del.descSetLayouts_.clear();
	del.queryPools_.clear();
	del.callbacks_.clear();
}
1756
1757
// Destroys every queued object and empties all the lists. Destruction order is
// fixed and matters: callbacks run first so they can still reference objects
// queued after them, and VMA-backed buffers/images are freed through the
// allocator rather than plain vkDestroy*.
void VulkanDeleteList::PerformDeletes(VulkanContext *vulkan, VmaAllocator allocator) {
	int deleteCount = 0;

	for (auto &callback : callbacks_) {
		callback.func(vulkan, callback.userdata);
		deleteCount++;
	}
	callbacks_.clear();

	VkDevice device = vulkan->GetDevice();

	// Destroy each handle in a list with the given vkDestroy*/vkFree* entry
	// point, count it, then clear the list.
	auto destroyAll = [&](auto &handles, auto destroyFunc) {
		for (auto &handle : handles) {
			destroyFunc(device, handle, nullptr);
			deleteCount++;
		}
		handles.clear();
	};

	destroyAll(cmdPools_, vkDestroyCommandPool);
	destroyAll(descPools_, vkDestroyDescriptorPool);
	destroyAll(modules_, vkDestroyShaderModule);
	destroyAll(buffers_, vkDestroyBuffer);
	for (auto &buf : buffersWithAllocs_) {
		vmaDestroyBuffer(allocator, buf.buffer, buf.alloc);
		deleteCount++;
	}
	buffersWithAllocs_.clear();
	destroyAll(bufferViews_, vkDestroyBufferView);
	for (auto &imageWithAlloc : imagesWithAllocs_) {
		vmaDestroyImage(allocator, imageWithAlloc.image, imageWithAlloc.alloc);
		deleteCount++;
	}
	imagesWithAllocs_.clear();
	destroyAll(imageViews_, vkDestroyImageView);
	destroyAll(deviceMemory_, vkFreeMemory);
	destroyAll(samplers_, vkDestroySampler);
	destroyAll(pipelines_, vkDestroyPipeline);
	destroyAll(pipelineCaches_, vkDestroyPipelineCache);
	destroyAll(renderPasses_, vkDestroyRenderPass);
	destroyAll(framebuffers_, vkDestroyFramebuffer);
	destroyAll(pipelineLayouts_, vkDestroyPipelineLayout);
	destroyAll(descSetLayouts_, vkDestroyDescriptorSetLayout);
	destroyAll(queryPools_, vkDestroyQueryPool);

	deleteCount_ = deleteCount;
}
1854
1855
// Queries the memory requirements for an image. When VK_KHR_dedicated_allocation
// is available, also reports whether the driver requires or prefers a dedicated
// allocation for it; otherwise *dedicatedAllocation is set to false.
void VulkanContext::GetImageMemoryRequirements(VkImage image, VkMemoryRequirements *mem_reqs, bool *dedicatedAllocation) {
	if (!Extensions().KHR_dedicated_allocation) {
		// Plain path - no dedicated-allocation info available.
		vkGetImageMemoryRequirements(GetDevice(), image, mem_reqs);
		*dedicatedAllocation = false;
		return;
	}

	VkImageMemoryRequirementsInfo2KHR reqInfo{ VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
	reqInfo.image = image;

	// Chain the dedicated-requirements struct onto the query output.
	VkMemoryRequirements2KHR reqs2{ VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
	VkMemoryDedicatedRequirementsKHR dedicatedReqs{ VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
	ChainStruct(reqs2, &dedicatedReqs);

	vkGetImageMemoryRequirements2(GetDevice(), &reqInfo, &reqs2);

	*mem_reqs = reqs2.memoryRequirements;
	*dedicatedAllocation =
		(dedicatedReqs.requiresDedicatedAllocation != VK_FALSE) ||
		(dedicatedReqs.prefersDedicatedAllocation != VK_FALSE);
}
1875
1876
bool IsHashMaliDriverVersion(const VkPhysicalDeviceProperties &props) {
1877
// ARM used to put a hash in place of the driver version.
1878
// Now they only use major versions. We'll just make a bad heuristic.
1879
uint32_t major = VK_VERSION_MAJOR(props.driverVersion);
1880
uint32_t branch = VK_VERSION_PATCH(props.driverVersion);
1881
if (branch > 0)
1882
return true;
1883
if (branch > 100 || major > 100)
1884
return true;
1885
// Can (in theory) have false negatives!
1886
return false;
1887
}
1888
1889
// From Sascha's code
1890
std::string FormatDriverVersion(const VkPhysicalDeviceProperties &props) {
1891
if (props.vendorID == VULKAN_VENDOR_NVIDIA) {
1892
// For whatever reason, NVIDIA has their own scheme.
1893
// 10 bits = major version (up to r1023)
1894
// 8 bits = minor version (up to 255)
1895
// 8 bits = secondary branch version/build version (up to 255)
1896
// 6 bits = tertiary branch/build version (up to 63)
1897
uint32_t major = (props.driverVersion >> 22) & 0x3ff;
1898
uint32_t minor = (props.driverVersion >> 14) & 0x0ff;
1899
uint32_t secondaryBranch = (props.driverVersion >> 6) & 0x0ff;
1900
uint32_t tertiaryBranch = (props.driverVersion) & 0x003f;
1901
return StringFromFormat("%d.%d.%d.%d", major, minor, secondaryBranch, tertiaryBranch);
1902
} else if (props.vendorID == VULKAN_VENDOR_ARM) {
1903
// ARM used to just put a hash here. No point in splitting it up.
1904
if (IsHashMaliDriverVersion(props)) {
1905
return StringFromFormat("(hash) %08x", props.driverVersion);
1906
}
1907
}
1908
// Qualcomm has an inscrutable versioning scheme. Let's just display it as normal.
1909
// Standard scheme, use the standard macros.
1910
uint32_t major = VK_VERSION_MAJOR(props.driverVersion);
1911
uint32_t minor = VK_VERSION_MINOR(props.driverVersion);
1912
uint32_t branch = VK_VERSION_PATCH(props.driverVersion);
1913
return StringFromFormat("%d.%d.%d (%08x)", major, minor, branch, props.driverVersion);
1914
}
1915
1916
// Formats a packed Vulkan API version (as produced by VK_MAKE_API_VERSION)
// into "major.minor.patch".
std::string FormatAPIVersion(u32 version) {
	return StringFromFormat("%d.%d.%d", VK_API_VERSION_MAJOR(version), VK_API_VERSION_MINOR(version), VK_API_VERSION_PATCH(version));
}
1919
1920
// Mainly just the formats seen on gpuinfo.org for swapchains, as this function is only used for listing
// those in the UI. Also depth buffers that we used in one place.
// Might add more in the future if we find more uses for this.
// Returns a static string; never returns null (unknown formats get a placeholder).
const char *VulkanFormatToString(VkFormat format) {
	switch (format) {
	// Color formats (swapchain candidates).
	case VK_FORMAT_A1R5G5B5_UNORM_PACK16: return "A1R5G5B5_UNORM_PACK16";
	case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return "A2B10G10R10_UNORM_PACK32";
	case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return "A2R10G10B10_UNORM_PACK32";
	case VK_FORMAT_A8B8G8R8_SNORM_PACK32: return "A8B8G8R8_SNORM_PACK32";
	case VK_FORMAT_A8B8G8R8_SRGB_PACK32: return "A8B8G8R8_SRGB_PACK32";
	case VK_FORMAT_A8B8G8R8_UNORM_PACK32: return "A8B8G8R8_UNORM_PACK32";
	case VK_FORMAT_B10G11R11_UFLOAT_PACK32: return "B10G11R11_UFLOAT_PACK32";
	case VK_FORMAT_B4G4R4A4_UNORM_PACK16: return "B4G4R4A4_UNORM_PACK16";
	case VK_FORMAT_B5G5R5A1_UNORM_PACK16: return "B5G5R5A1_UNORM_PACK16";
	case VK_FORMAT_B5G6R5_UNORM_PACK16: return "B5G6R5_UNORM_PACK16";
	case VK_FORMAT_B8G8R8A8_SNORM: return "B8G8R8A8_SNORM";
	case VK_FORMAT_B8G8R8A8_SRGB: return "B8G8R8A8_SRGB";
	case VK_FORMAT_B8G8R8A8_UNORM: return "B8G8R8A8_UNORM";
	case VK_FORMAT_R16G16B16A16_SFLOAT: return "R16G16B16A16_SFLOAT";
	case VK_FORMAT_R16G16B16A16_SNORM: return "R16G16B16A16_SNORM";
	case VK_FORMAT_R16G16B16A16_UNORM: return "R16G16B16A16_UNORM";
	case VK_FORMAT_R4G4B4A4_UNORM_PACK16: return "R4G4B4A4_UNORM_PACK16";
	case VK_FORMAT_R5G5B5A1_UNORM_PACK16: return "R5G5B5A1_UNORM_PACK16";
	case VK_FORMAT_R5G6B5_UNORM_PACK16: return "R5G6B5_UNORM_PACK16";
	case VK_FORMAT_R8G8B8A8_SNORM: return "R8G8B8A8_SNORM";
	case VK_FORMAT_R8G8B8A8_SRGB: return "R8G8B8A8_SRGB";
	case VK_FORMAT_R8G8B8A8_UNORM: return "R8G8B8A8_UNORM";

	// Depth/stencil formats (shortened names).
	case VK_FORMAT_D24_UNORM_S8_UINT: return "D24S8";
	case VK_FORMAT_D16_UNORM: return "D16";
	case VK_FORMAT_D16_UNORM_S8_UINT: return "D16S8";
	case VK_FORMAT_D32_SFLOAT: return "D32f";
	case VK_FORMAT_D32_SFLOAT_S8_UINT: return "D32fS8";
	case VK_FORMAT_S8_UINT: return "S8";
	case VK_FORMAT_UNDEFINED: return "UNDEFINED (BAD!)";

	default: return "(format not added to string list)";
	}
}
1959
1960
// I miss Rust where this is automatic :(
// Maps a VkColorSpaceKHR to a short display name for UI/logging.
// Returns a static string; unknown values get a placeholder.
const char *VulkanColorSpaceToString(VkColorSpaceKHR colorSpace) {
	switch (colorSpace) {
	case VK_COLOR_SPACE_SRGB_NONLINEAR_KHR: return "SRGB_NONLINEAR";
	case VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT: return "DISPLAY_P3_NONLINEAR";
	case VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT: return "EXTENDED_SRGB_LINEAR";
	case VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT: return "DISPLAY_P3_LINEAR";
	case VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT: return "DCI_P3_NONLINEAR";
	case VK_COLOR_SPACE_BT709_LINEAR_EXT: return "BT709_LINEAR";
	case VK_COLOR_SPACE_BT709_NONLINEAR_EXT: return "BT709_NONLINEAR";
	case VK_COLOR_SPACE_BT2020_LINEAR_EXT: return "BT2020_LINEAR";
	case VK_COLOR_SPACE_HDR10_ST2084_EXT: return "HDR10_ST2084";
	case VK_COLOR_SPACE_DOLBYVISION_EXT: return "DOLBYVISION";
	case VK_COLOR_SPACE_HDR10_HLG_EXT: return "HDR10_HLG";
	case VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT: return "ADOBERGB_LINEAR";
	case VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT: return "ADOBERGB_NONLINEAR";
	case VK_COLOR_SPACE_PASS_THROUGH_EXT: return "PASS_THROUGH";
	case VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT: return "EXTENDED_SRGB_NONLINEAR";
	case VK_COLOR_SPACE_DISPLAY_NATIVE_AMD: return "DISPLAY_NATIVE_AMD";
	default: return "(unknown)";
	}
}
1982
1983