// GitHub Repository: hrydgard/ppsspp
// Path: blob/master/Common/GPU/Vulkan/thin3d_vulkan.cpp
// Copyright (c) 2015- PPSSPP Project.

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 2.0 or later versions.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License 2.0 for more details.

// A copy of the GPL 2.0 should have been included with the program.
// If not, see http://www.gnu.org/licenses/

// Official git repository and contact information can be found at
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.

#include <cstdio>
#include <vector>
#include <string>
#include <map>

#include "Common/Log.h"
#include "Common/StringUtils.h"
#include "Common/System/Display.h"
#include "Common/Math/lin/matrix4x4.h"
#include "Common/Data/Convert/SmallDataConvert.h"
#include "Common/GPU/thin3d.h"
#include "Common/GPU/Vulkan/VulkanRenderManager.h"
#include "Common/GPU/Vulkan/VulkanContext.h"
#include "Common/GPU/Vulkan/VulkanImage.h"
#include "Common/GPU/Vulkan/VulkanMemory.h"
#include "Common/GPU/Vulkan/VulkanLoader.h"
#include "Common/Thread/Promise.h"

// For descriptor set 0 (the only one), we use a simple descriptor set for all thin3d rendering: 1 UBO binding point, 3 combined texture/samplers.
//
// binding 0 - uniform buffer
// binding 1 - texture/sampler
// binding 2 - texture/sampler
// binding 3 - texture/sampler
//
// Vertex data lives in a separate namespace (location = 0, 1, etc).

using namespace PPSSPP_VK;

namespace Draw {

// This can actually be replaced with a cast as the values are in the right order.
static const VkCompareOp compToVK[] = {
	VK_COMPARE_OP_NEVER,
	VK_COMPARE_OP_LESS,
	VK_COMPARE_OP_EQUAL,
	VK_COMPARE_OP_LESS_OR_EQUAL,
	VK_COMPARE_OP_GREATER,
	VK_COMPARE_OP_NOT_EQUAL,
	VK_COMPARE_OP_GREATER_OR_EQUAL,
	VK_COMPARE_OP_ALWAYS
};

// So can this.
static const VkBlendOp blendEqToVk[] = {
	VK_BLEND_OP_ADD,
	VK_BLEND_OP_SUBTRACT,
	VK_BLEND_OP_REVERSE_SUBTRACT,
	VK_BLEND_OP_MIN,
	VK_BLEND_OP_MAX,
};

static const VkBlendFactor blendFactorToVk[] = {
	VK_BLEND_FACTOR_ZERO,
	VK_BLEND_FACTOR_ONE,
	VK_BLEND_FACTOR_SRC_COLOR,
	VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
	VK_BLEND_FACTOR_DST_COLOR,
	VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
	VK_BLEND_FACTOR_SRC_ALPHA,
	VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
	VK_BLEND_FACTOR_DST_ALPHA,
	VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
	VK_BLEND_FACTOR_CONSTANT_COLOR,
	VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
	VK_BLEND_FACTOR_CONSTANT_ALPHA,
	VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
	VK_BLEND_FACTOR_SRC1_COLOR,
	VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
	VK_BLEND_FACTOR_SRC1_ALPHA,
	VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA,
};

static const VkLogicOp logicOpToVK[] = {
	VK_LOGIC_OP_CLEAR,
	VK_LOGIC_OP_SET,
	VK_LOGIC_OP_COPY,
	VK_LOGIC_OP_COPY_INVERTED,
	VK_LOGIC_OP_NO_OP,
	VK_LOGIC_OP_INVERT,
	VK_LOGIC_OP_AND,
	VK_LOGIC_OP_NAND,
	VK_LOGIC_OP_OR,
	VK_LOGIC_OP_NOR,
	VK_LOGIC_OP_XOR,
	VK_LOGIC_OP_EQUIVALENT,
	VK_LOGIC_OP_AND_REVERSE,
	VK_LOGIC_OP_AND_INVERTED,
	VK_LOGIC_OP_OR_REVERSE,
	VK_LOGIC_OP_OR_INVERTED,
};

static const VkPrimitiveTopology primToVK[] = {
	VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
	VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
	VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
	VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
	VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
	VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
	// Tessellation shader primitive.
	VK_PRIMITIVE_TOPOLOGY_PATCH_LIST,
	// The rest are for geometry shaders only.
	VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
	VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
	VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
	VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
};


static const VkStencilOp stencilOpToVK[8] = {
	VK_STENCIL_OP_KEEP,
	VK_STENCIL_OP_ZERO,
	VK_STENCIL_OP_REPLACE,
	VK_STENCIL_OP_INCREMENT_AND_CLAMP,
	VK_STENCIL_OP_DECREMENT_AND_CLAMP,
	VK_STENCIL_OP_INVERT,
	VK_STENCIL_OP_INCREMENT_AND_WRAP,
	VK_STENCIL_OP_DECREMENT_AND_WRAP,
};
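// Note: these mapping tables are indexed directly by the corresponding thin3d enum values,
// so their order must stay in sync with the enums declared in Common/GPU/thin3d.h.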

class VKBlendState : public BlendState {
public:
	VkPipelineColorBlendStateCreateInfo info{ VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO };
	std::vector<VkPipelineColorBlendAttachmentState> attachments;
};

class VKDepthStencilState : public DepthStencilState {
public:
	VkPipelineDepthStencilStateCreateInfo info{ VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO };
};

class VKRasterState : public RasterState {
public:
	VKRasterState(const RasterStateDesc &desc) {
		cullFace = desc.cull;
		frontFace = desc.frontFace;
	}
	Facing frontFace;
	CullMode cullFace;

	void ToVulkan(VkPipelineRasterizationStateCreateInfo *info) const {
		memset(info, 0, sizeof(*info));
		info->sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
		info->frontFace = frontFace == Facing::CCW ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE;
		switch (cullFace) {
		case CullMode::BACK: info->cullMode = VK_CULL_MODE_BACK_BIT; break;
		case CullMode::FRONT: info->cullMode = VK_CULL_MODE_FRONT_BIT; break;
		case CullMode::FRONT_AND_BACK: info->cullMode = VK_CULL_MODE_FRONT_AND_BACK; break;
		case CullMode::NONE: info->cullMode = VK_CULL_MODE_NONE; break;
		}
		info->polygonMode = VK_POLYGON_MODE_FILL;
		info->lineWidth = 1.0f;
	}
};

VkShaderStageFlagBits StageToVulkan(ShaderStage stage) {
	switch (stage) {
	case ShaderStage::Vertex: return VK_SHADER_STAGE_VERTEX_BIT;
	case ShaderStage::Geometry: return VK_SHADER_STAGE_GEOMETRY_BIT;
	case ShaderStage::Compute: return VK_SHADER_STAGE_COMPUTE_BIT;
	case ShaderStage::Fragment: return VK_SHADER_STAGE_FRAGMENT_BIT;
	}
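	// Fallback for safety; all valid stages are handled above.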
	return VK_SHADER_STAGE_FRAGMENT_BIT;
}

// Not registering this as a resource holder, instead the pipeline is registered. It will
// invoke Compile again to recreate the shader then link them together.
class VKShaderModule : public ShaderModule {
public:
	VKShaderModule(ShaderStage stage, const std::string &tag) : stage_(stage), tag_(tag) {
		vkstage_ = StageToVulkan(stage);
	}
	bool Compile(VulkanContext *vulkan, const uint8_t *data, size_t size);
	const std::string &GetSource() const { return source_; }
	~VKShaderModule() {
		if (module_) {
			VkShaderModule shaderModule = module_->BlockUntilReady();
			vulkan_->Delete().QueueDeleteShaderModule(shaderModule);
			vulkan_->Delete().QueueCallback([](VulkanContext *context, void *m) {
				auto module = (Promise<VkShaderModule> *)m;
				delete module;
			}, module_);
		}
	}
	Promise<VkShaderModule> *Get() const { return module_; }
	ShaderStage GetStage() const override {
		return stage_;
	}

private:
	VulkanContext *vulkan_ = nullptr;
	Promise<VkShaderModule> *module_ = nullptr;
	VkShaderStageFlagBits vkstage_;
	bool ok_ = false;
	ShaderStage stage_;
	std::string source_; // So we can recompile in case of context loss.
	std::string tag_;
};

bool VKShaderModule::Compile(VulkanContext *vulkan, const uint8_t *data, size_t size) {
	// We'll need this to free it later.
	vulkan_ = vulkan;
	source_ = (const char *)data;
	std::vector<uint32_t> spirv;
	std::string errorMessage;
	if (!GLSLtoSPV(vkstage_, source_.c_str(), GLSLVariant::VULKAN, spirv, &errorMessage)) {
		WARN_LOG(Log::G3D, "Shader compile to module failed (%s): %s", tag_.c_str(), errorMessage.c_str());
		return false;
	}

	// Just for kicks, sanity check the SPIR-V. The disasm isn't perfect
	// but gives you some idea of what's going on.
#if 0
	std::string disasm;
	if (DisassembleSPIRV(spirv, &disasm)) {
		OutputDebugStringA(disasm.c_str());
	}
#endif

	VkShaderModule shaderModule = VK_NULL_HANDLE;
	if (vulkan->CreateShaderModule(spirv, &shaderModule, tag_.c_str())) {
		module_ = Promise<VkShaderModule>::AlreadyDone(shaderModule);
		ok_ = true;
	} else {
		WARN_LOG(Log::G3D, "vkCreateShaderModule failed (%s)", tag_.c_str());
		ok_ = false;
	}
	return ok_;
}

class VKInputLayout : public InputLayout {
public:
	VkVertexInputBindingDescription binding;
	std::vector<VkVertexInputAttributeDescription> attributes;
	VkPipelineVertexInputStateCreateInfo visc;
};

class VKPipeline : public Pipeline {
public:
	VKPipeline(VulkanContext *vulkan, size_t size, PipelineFlags _flags, const char *tag) : vulkan_(vulkan), flags(_flags), tag_(tag) {
		uboSize_ = (int)size;
		ubo_ = new uint8_t[uboSize_];
		vkrDesc = new VKRGraphicsPipelineDesc();
	}
	~VKPipeline() {
		if (pipeline) {
			pipeline->QueueForDeletion(vulkan_);
		}
		for (auto dep : deps) {
			dep->Release();
		}
		delete[] ubo_;
		vkrDesc->Release();
	}

	void SetDynamicUniformData(const void *data, size_t size) {
		_dbg_assert_((int)size <= uboSize_);
		memcpy(ubo_, data, size);
	}

	// Returns the binding offset, and the VkBuffer to bind.
	size_t PushUBO(VulkanPushPool *buf, VulkanContext *vulkan, VkBuffer *vkbuf) {
		return buf->Push(ubo_, uboSize_, vulkan->GetPhysicalDeviceProperties().properties.limits.minUniformBufferOffsetAlignment, vkbuf);
	}

	int GetUBOSize() const {
		return uboSize_;
	}

	VKRGraphicsPipeline *pipeline = nullptr;
	VKRGraphicsPipelineDesc *vkrDesc = nullptr;
	PipelineFlags flags;

	std::vector<VKShaderModule *> deps;

	int stride = 0;
	int dynamicUniformSize = 0;

	bool usesStencil = false;

private:
	VulkanContext *vulkan_;
	uint8_t *ubo_;
	int uboSize_;
	std::string tag_;
};

class VKTexture;
class VKBuffer;
class VKSamplerState;

enum {
	MAX_BOUND_TEXTURES = MAX_TEXTURE_SLOTS,
};

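// Composite key identifying a full set of bindings (texture views, samplers and the uniform buffer),
// ordered lexicographically so it can be used as a std::map key.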
struct DescriptorSetKey {
	VkImageView imageViews_[MAX_BOUND_TEXTURES];
	VKSamplerState *samplers_[MAX_BOUND_TEXTURES];
	VkBuffer buffer_;

	bool operator < (const DescriptorSetKey &other) const {
		for (int i = 0; i < MAX_BOUND_TEXTURES; ++i) {
			if (imageViews_[i] < other.imageViews_[i]) return true; else if (imageViews_[i] > other.imageViews_[i]) return false;
			if (samplers_[i] < other.samplers_[i]) return true; else if (samplers_[i] > other.samplers_[i]) return false;
		}
		if (buffer_ < other.buffer_) return true; else if (buffer_ > other.buffer_) return false;
		return false;
	}
};

class VKTexture : public Texture {
public:
	VKTexture(VulkanContext *vulkan, const TextureDesc &desc)
		: vulkan_(vulkan), mipLevels_(desc.mipLevels) {
		format_ = desc.format;
	}
	bool Create(VkCommandBuffer cmd, VulkanBarrierBatch *postBarriers, VulkanPushPool *pushBuffer, const TextureDesc &desc);
	void Update(VkCommandBuffer cmd, VulkanBarrierBatch *postBarriers, VulkanPushPool *pushBuffer, const uint8_t *const *data, TextureCallback callback, int numLevels);

	~VKTexture() {
		Destroy();
	}

	VkImageView GetImageView() {
		if (vkTex_) {
			return vkTex_->GetImageView();
		}
		return VK_NULL_HANDLE; // This would be bad.
	}

	VkImageView GetImageArrayView() {
		if (vkTex_) {
			return vkTex_->GetImageArrayView();
		}
		return VK_NULL_HANDLE; // This would be bad.
	}

	int NumLevels() const {
		return mipLevels_;
	}

private:
	void UpdateInternal(VkCommandBuffer cmd, VulkanPushPool *pushBuffer, const uint8_t *const *data, TextureCallback callback, int numLevels);

	void Destroy() {
		if (vkTex_) {
			vkTex_->Destroy();
			delete vkTex_;
			vkTex_ = nullptr;
		}
	}

	VulkanContext *vulkan_;
	VulkanTexture *vkTex_ = nullptr;

	int mipLevels_ = 0;
};

class VKFramebuffer;

class VKContext : public DrawContext {
public:
	VKContext(VulkanContext *vulkan, bool useRenderThread);
	~VKContext();

	BackendState GetCurrentBackendState() const override {
		return BackendState{
			(u32)renderManager_.GetNumSteps(),
			true, // Means that the other value is meaningful.
		};
	}

	void DebugAnnotate(const char *annotation) override;
	void Wait() override {
		vkDeviceWaitIdle(vulkan_->GetDevice());
	}

	const DeviceCaps &GetDeviceCaps() const override {
		return caps_;
	}
	std::vector<std::string> GetDeviceList() const override {
		std::vector<std::string> list;
		for (int i = 0; i < vulkan_->GetNumPhysicalDevices(); i++) {
			list.emplace_back(vulkan_->GetPhysicalDeviceProperties(i).properties.deviceName);
		}
		return list;
	}
	std::vector<std::string> GetPresentModeList(std::string_view currentMarkerString) const override {
		std::vector<std::string> list;
		for (auto mode : vulkan_->GetAvailablePresentModes()) {
			std::string str = VulkanPresentModeToString(mode);
			if (mode == vulkan_->GetPresentMode()) {
				str += std::string(" (") + std::string(currentMarkerString) + ")";
			}
			list.push_back(str);
		}
		return list;
	}
	std::vector<std::string> GetSurfaceFormatList() const override {
		std::vector<std::string> list;
		for (auto &format : vulkan_->SurfaceFormats()) {
			std::string str = StringFromFormat("%s : %s", VulkanFormatToString(format.format), VulkanColorSpaceToString(format.colorSpace));
			list.push_back(str);
		}
		return list;
	}

	uint32_t GetSupportedShaderLanguages() const override {
		return (uint32_t)ShaderLanguage::GLSL_VULKAN;
	}
	uint32_t GetDataFormatSupport(DataFormat fmt) const override;

	PresentMode GetPresentMode() const {
		switch (vulkan_->GetPresentMode()) {
		case VK_PRESENT_MODE_FIFO_KHR: return PresentMode::FIFO;
		case VK_PRESENT_MODE_FIFO_RELAXED_KHR: return PresentMode::FIFO; // We treat it as FIFO for now (and won't ever enable it anyway...)
		case VK_PRESENT_MODE_IMMEDIATE_KHR: return PresentMode::IMMEDIATE;
		case VK_PRESENT_MODE_MAILBOX_KHR: return PresentMode::MAILBOX;
		default: return PresentMode::FIFO;
		}
	}

	DepthStencilState *CreateDepthStencilState(const DepthStencilStateDesc &desc) override;
	BlendState *CreateBlendState(const BlendStateDesc &desc) override;
	InputLayout *CreateInputLayout(const InputLayoutDesc &desc) override;
	SamplerState *CreateSamplerState(const SamplerStateDesc &desc) override;
	RasterState *CreateRasterState(const RasterStateDesc &desc) override;
	Pipeline *CreateGraphicsPipeline(const PipelineDesc &desc, const char *tag) override;
	ShaderModule *CreateShaderModule(ShaderStage stage, ShaderLanguage language, const uint8_t *data, size_t dataSize, const char *tag) override;

	Texture *CreateTexture(const TextureDesc &desc) override;
	Buffer *CreateBuffer(size_t size, uint32_t usageFlags) override;
	Framebuffer *CreateFramebuffer(const FramebufferDesc &desc) override;

	void UpdateBuffer(Buffer *buffer, const uint8_t *data, size_t offset, size_t size, UpdateBufferFlags flags) override;
	void UpdateTextureLevels(Texture *texture, const uint8_t **data, TextureCallback initDataCallback, int numLevels) override;

	void CopyFramebufferImage(Framebuffer *src, int level, int x, int y, int z, Framebuffer *dst, int dstLevel, int dstX, int dstY, int dstZ, int width, int height, int depth, Aspect aspects, const char *tag) override;
	bool BlitFramebuffer(Framebuffer *src, int srcX1, int srcY1, int srcX2, int srcY2, Framebuffer *dst, int dstX1, int dstY1, int dstX2, int dstY2, Aspect aspects, FBBlitFilter filter, const char *tag) override;
	bool CopyFramebufferToMemory(Framebuffer *src, Aspect aspects, int x, int y, int w, int h, Draw::DataFormat format, void *pixels, int pixelStride, ReadbackMode mode, const char *tag) override;
	DataFormat PreferredFramebufferReadbackFormat(Framebuffer *src) override;

	// These functions should be self explanatory.
	void BindFramebufferAsRenderTarget(Framebuffer *fbo, const RenderPassInfo &rp, const char *tag) override;
	void BindFramebufferAsTexture(Framebuffer *fbo, int binding, Aspect channelBit, int layer) override;

	void GetFramebufferDimensions(Framebuffer *fbo, int *w, int *h) override;

	void SetScissorRect(int left, int top, int width, int height) override;
	void SetViewport(const Viewport &viewport) override;
	void SetBlendFactor(float color[4]) override;
	void SetStencilParams(uint8_t refValue, uint8_t writeMask, uint8_t compareMask) override;

	void BindSamplerStates(int start, int count, SamplerState **state) override;
	void BindTextures(int start, int count, Texture **textures, TextureBindFlags flags) override;
	void BindNativeTexture(int sampler, void *nativeTexture) override;

	void BindPipeline(Pipeline *pipeline) override {
		curPipeline_ = (VKPipeline *)pipeline;
	}

	void BindVertexBuffer(Buffer *vertexBuffer, int offset) override {
		curVBuffer_ = (VKBuffer *)vertexBuffer;
		curVBufferOffset_ = offset;
	}
	void BindIndexBuffer(Buffer *indexBuffer, int offset) override {
		curIBuffer_ = (VKBuffer *)indexBuffer;
		curIBufferOffset_ = offset;
	}

	void UpdateDynamicUniformBuffer(const void *ub, size_t size) override;

	// TODO: Add more sophisticated draws.
	void Draw(int vertexCount, int offset) override;
	void DrawIndexed(int vertexCount, int offset) override;
	void DrawUP(const void *vdata, int vertexCount) override;
	void DrawIndexedUP(const void *vdata, int vertexCount, const void *idata, int indexCount) override;
	// Specialized for quick IMGUI drawing.
	void DrawIndexedClippedBatchUP(const void *vdata, int vertexCount, const void *idata, int indexCount, Slice<ClippedDraw>, const void *dynUniforms, size_t size) override;

	void BindCurrentPipeline();
	void ApplyDynamicState();

	void Clear(Aspect aspects, uint32_t colorval, float depthVal, int stencilVal) override;

	void BeginFrame(DebugFlags debugFlags) override;
	void EndFrame() override;
	void Present(PresentMode presentMode, int vblanks) override;

	int GetFrameCount() override {
		return frameCount_;
	}

	void FlushState() override {}

	void ResetStats() override {
		renderManager_.ResetStats();
	}
	void StopThreads() override {
		renderManager_.StopThreads();
	}

	void StartThreads() override {
		renderManager_.StartThreads();
	}


	std::string GetInfoString(InfoField info) const override {
		// TODO: Make these actually query the right information
		switch (info) {
		case InfoField::APINAME: return "Vulkan";
		case InfoField::VENDORSTRING: return vulkan_->GetPhysicalDeviceProperties().properties.deviceName;
		case InfoField::VENDOR: return VulkanVendorString(vulkan_->GetPhysicalDeviceProperties().properties.vendorID);
		case InfoField::DRIVER: return FormatDriverVersion(vulkan_->GetPhysicalDeviceProperties().properties);
		case InfoField::SHADELANGVERSION: return "N/A";
		case InfoField::APIVERSION: return FormatAPIVersion(vulkan_->InstanceApiVersion());
		case InfoField::DEVICE_API_VERSION: return FormatAPIVersion(vulkan_->DeviceApiVersion());
		default: return "?";
		}
	}

	void BindDescriptors(VkBuffer buffer, PackedDescriptor descriptors[4]);

	std::vector<std::string> GetFeatureList() const override;
	std::vector<std::string> GetExtensionList(bool device, bool enabledOnly) const override;

	uint64_t GetNativeObject(NativeObject obj, void *srcObject) override;

	void HandleEvent(Event ev, int width, int height, void *param1, void *param2) override;

	void Invalidate(InvalidationFlags flags) override;

	void InvalidateFramebuffer(FBInvalidationStage stage, Aspect aspects) override;

	void SetInvalidationCallback(InvalidationCallback callback) override {
		renderManager_.SetInvalidationCallback(callback);
	}

	std::string GetGpuProfileString() const override {
		return renderManager_.GetGpuProfileString();
	}

private:
	VulkanTexture *GetNullTexture();
	VulkanContext *vulkan_ = nullptr;

	int frameCount_ = 0;
	VulkanRenderManager renderManager_;

	VulkanTexture *nullTexture_ = nullptr;

	AutoRef<VKPipeline> curPipeline_;
	AutoRef<VKBuffer> curVBuffer_;
	int curVBufferOffset_ = 0;
	AutoRef<VKBuffer> curIBuffer_;
	int curIBufferOffset_ = 0;

	VKRPipelineLayout *pipelineLayout_ = nullptr;
	VkPipelineCache pipelineCache_ = VK_NULL_HANDLE;
	AutoRef<VKFramebuffer> curFramebuffer_;

	VkDevice device_;

	enum {
		MAX_FRAME_COMMAND_BUFFERS = 256,
	};
	AutoRef<VKTexture> boundTextures_[MAX_BOUND_TEXTURES];
	AutoRef<VKSamplerState> boundSamplers_[MAX_BOUND_TEXTURES];
	VkImageView boundImageView_[MAX_BOUND_TEXTURES]{};
	TextureBindFlags boundTextureFlags_[MAX_BOUND_TEXTURES]{};

	VulkanPushPool *push_ = nullptr;

	DeviceCaps caps_{};

	uint8_t stencilRef_ = 0;
	uint8_t stencilWriteMask_ = 0xFF;
	uint8_t stencilCompareMask_ = 0xFF;
};

// Bits per pixel, not bytes.
// VERY incomplete!
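// For block-compressed formats, the value is the average number of bits per texel.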
static int GetBpp(VkFormat format) {
	switch (format) {
	case VK_FORMAT_R32_SFLOAT:
	case VK_FORMAT_R8G8B8A8_UNORM:
	case VK_FORMAT_B8G8R8A8_UNORM:
		return 32;
	case VK_FORMAT_R8_UNORM:
	case VK_FORMAT_S8_UINT:
		return 8;
	case VK_FORMAT_R8G8_UNORM:
	case VK_FORMAT_R16_SFLOAT:
	case VK_FORMAT_R16_UNORM:
		return 16;
	case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
	case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
	case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
	case VK_FORMAT_R5G6B5_UNORM_PACK16:
	case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
	case VK_FORMAT_B5G6R5_UNORM_PACK16:
	case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
		return 16;
	case VK_FORMAT_D24_UNORM_S8_UINT:
		return 32;
	case VK_FORMAT_D16_UNORM:
		return 16;
	case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
		return 4;
	case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
	case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
		return 8;
	case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
		return 8;
	case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
		return 4;
	case VK_FORMAT_BC2_UNORM_BLOCK:
	case VK_FORMAT_BC3_UNORM_BLOCK:
	case VK_FORMAT_BC4_UNORM_BLOCK:
	case VK_FORMAT_BC5_UNORM_BLOCK:
	case VK_FORMAT_BC7_UNORM_BLOCK:
		return 8;
	default:
		return 0;
	}
}

static VkFormat DataFormatToVulkan(DataFormat format) {
	switch (format) {
	case DataFormat::D16: return VK_FORMAT_D16_UNORM;
	case DataFormat::D16_S8: return VK_FORMAT_D16_UNORM_S8_UINT;
	case DataFormat::D24_S8: return VK_FORMAT_D24_UNORM_S8_UINT;
	case DataFormat::D32F: return VK_FORMAT_D32_SFLOAT;
	case DataFormat::D32F_S8: return VK_FORMAT_D32_SFLOAT_S8_UINT;
	case DataFormat::S8: return VK_FORMAT_S8_UINT;

	case DataFormat::R16_UNORM: return VK_FORMAT_R16_UNORM;

	case DataFormat::R16_FLOAT: return VK_FORMAT_R16_SFLOAT;
	case DataFormat::R16G16_FLOAT: return VK_FORMAT_R16G16_SFLOAT;
	case DataFormat::R16G16B16A16_FLOAT: return VK_FORMAT_R16G16B16A16_SFLOAT;
	case DataFormat::R8_UNORM: return VK_FORMAT_R8_UNORM;
	case DataFormat::R8G8_UNORM: return VK_FORMAT_R8G8_UNORM;
	case DataFormat::R8G8B8_UNORM: return VK_FORMAT_R8G8B8_UNORM;
	case DataFormat::R8G8B8A8_UNORM: return VK_FORMAT_R8G8B8A8_UNORM;
	case DataFormat::R4G4_UNORM_PACK8: return VK_FORMAT_R4G4_UNORM_PACK8;

	// Note: A4R4G4B4_UNORM_PACK16 is not supported.
	case DataFormat::R4G4B4A4_UNORM_PACK16: return VK_FORMAT_R4G4B4A4_UNORM_PACK16;
	case DataFormat::B4G4R4A4_UNORM_PACK16: return VK_FORMAT_B4G4R4A4_UNORM_PACK16;
	case DataFormat::R5G5B5A1_UNORM_PACK16: return VK_FORMAT_R5G5B5A1_UNORM_PACK16;
	case DataFormat::B5G5R5A1_UNORM_PACK16: return VK_FORMAT_B5G5R5A1_UNORM_PACK16;
	case DataFormat::R5G6B5_UNORM_PACK16: return VK_FORMAT_R5G6B5_UNORM_PACK16;
	case DataFormat::B5G6R5_UNORM_PACK16: return VK_FORMAT_B5G6R5_UNORM_PACK16;
	case DataFormat::A1R5G5B5_UNORM_PACK16: return VK_FORMAT_A1R5G5B5_UNORM_PACK16;

	case DataFormat::R32_FLOAT: return VK_FORMAT_R32_SFLOAT;
	case DataFormat::R32G32_FLOAT: return VK_FORMAT_R32G32_SFLOAT;
	case DataFormat::R32G32B32_FLOAT: return VK_FORMAT_R32G32B32_SFLOAT;
	case DataFormat::R32G32B32A32_FLOAT: return VK_FORMAT_R32G32B32A32_SFLOAT;

	case DataFormat::BC1_RGBA_UNORM_BLOCK: return VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
	case DataFormat::BC2_UNORM_BLOCK: return VK_FORMAT_BC2_UNORM_BLOCK;
	case DataFormat::BC3_UNORM_BLOCK: return VK_FORMAT_BC3_UNORM_BLOCK;
	case DataFormat::BC4_UNORM_BLOCK: return VK_FORMAT_BC4_UNORM_BLOCK;
	case DataFormat::BC5_UNORM_BLOCK: return VK_FORMAT_BC5_UNORM_BLOCK;
	case DataFormat::BC7_UNORM_BLOCK: return VK_FORMAT_BC7_UNORM_BLOCK;

	case DataFormat::ETC2_R8G8B8A1_UNORM_BLOCK: return VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK;
	case DataFormat::ETC2_R8G8B8A8_UNORM_BLOCK: return VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
	case DataFormat::ETC2_R8G8B8_UNORM_BLOCK: return VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;

	case DataFormat::ASTC_4x4_UNORM_BLOCK: return VK_FORMAT_ASTC_4x4_UNORM_BLOCK;

	default:
		return VK_FORMAT_UNDEFINED;
	}
}

static inline VkSamplerAddressMode AddressModeToVulkan(Draw::TextureAddressMode mode) {
	switch (mode) {
	case TextureAddressMode::CLAMP_TO_BORDER: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
	case TextureAddressMode::CLAMP_TO_EDGE: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
	case TextureAddressMode::REPEAT_MIRROR: return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
	default:
	case TextureAddressMode::REPEAT: return VK_SAMPLER_ADDRESS_MODE_REPEAT;
	}
}

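// Lazily creates a small black placeholder texture that gets bound whenever a texture slot
// would otherwise be empty, so shaders always sample from something valid.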
VulkanTexture *VKContext::GetNullTexture() {
	if (!nullTexture_) {
		VkCommandBuffer cmdInit = renderManager_.GetInitCmd();
		nullTexture_ = new VulkanTexture(vulkan_, "Null");
		int w = 8;
		int h = 8;
		VulkanBarrierBatch barrier;
		nullTexture_->CreateDirect(w, h, 1, 1, VK_FORMAT_A8B8G8R8_UNORM_PACK32, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
			VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT, &barrier);
		barrier.Flush(cmdInit);
		uint32_t bindOffset;
		VkBuffer bindBuf;
		uint32_t *data = (uint32_t *)push_->Allocate(w * h * 4, 4, &bindBuf, &bindOffset);
		_assert_(data != nullptr);
		for (int y = 0; y < h; y++) {
			for (int x = 0; x < w; x++) {
				// data[y*w + x] = ((x ^ y) & 1) ? 0xFF808080 : 0xFF000000; // gray/black checkerboard
				data[y*w + x] = 0; // black
			}
		}
		TextureCopyBatch batch;
		nullTexture_->CopyBufferToMipLevel(cmdInit, &batch, 0, w, h, 0, bindBuf, bindOffset, w);
		nullTexture_->FinishCopyBatch(cmdInit, &batch);
		nullTexture_->EndCreate(cmdInit, false, VK_PIPELINE_STAGE_TRANSFER_BIT);
	}
	return nullTexture_;
}

class VKSamplerState : public SamplerState {
public:
	VKSamplerState(VulkanContext *vulkan, const SamplerStateDesc &desc) : vulkan_(vulkan) {
		VkSamplerCreateInfo s = { VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO };
		s.addressModeU = AddressModeToVulkan(desc.wrapU);
		s.addressModeV = AddressModeToVulkan(desc.wrapV);
		s.addressModeW = AddressModeToVulkan(desc.wrapW);
		s.anisotropyEnable = desc.maxAniso > 1.0f;
		s.maxAnisotropy = desc.maxAniso;
		s.magFilter = desc.magFilter == TextureFilter::LINEAR ? VK_FILTER_LINEAR : VK_FILTER_NEAREST;
		s.minFilter = desc.minFilter == TextureFilter::LINEAR ? VK_FILTER_LINEAR : VK_FILTER_NEAREST;
		s.mipmapMode = desc.mipFilter == TextureFilter::LINEAR ? VK_SAMPLER_MIPMAP_MODE_LINEAR : VK_SAMPLER_MIPMAP_MODE_NEAREST;
		s.maxLod = VK_LOD_CLAMP_NONE;
		VkResult res = vkCreateSampler(vulkan_->GetDevice(), &s, nullptr, &sampler_);
		_assert_(VK_SUCCESS == res);
	}
	~VKSamplerState() {
		vulkan_->Delete().QueueDeleteSampler(sampler_);
	}

	VkSampler GetSampler() { return sampler_; }

private:
	VulkanContext *vulkan_;
	VkSampler sampler_;
};

SamplerState *VKContext::CreateSamplerState(const SamplerStateDesc &desc) {
	return new VKSamplerState(vulkan_, desc);
}

RasterState *VKContext::CreateRasterState(const RasterStateDesc &desc) {
	return new VKRasterState(desc);
}

void VKContext::BindSamplerStates(int start, int count, SamplerState **state) {
	_assert_(start + count <= MAX_BOUND_TEXTURES);
	for (int i = start; i < start + count; i++) {
		boundSamplers_[i] = (VKSamplerState *)state[i - start];
	}
}

enum class TextureState {
	UNINITIALIZED,
	STAGED,
	INITIALIZED,
	PENDING_DESTRUCTION,
};

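// Creates the VkImage in TRANSFER_DST layout, uploads any provided initial data through the
// push buffer, and generates the remaining mip levels on the GPU if fewer levels were supplied.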
bool VKTexture::Create(VkCommandBuffer cmd, VulkanBarrierBatch *postBarriers, VulkanPushPool *pushBuffer, const TextureDesc &desc) {
	// Zero-sized textures not allowed.
	_assert_(desc.width * desc.height * desc.depth > 0); // remember to set depth to 1!
	if (desc.width * desc.height * desc.depth <= 0) {
		ERROR_LOG(Log::G3D, "Bad texture dimensions %dx%dx%d", desc.width, desc.height, desc.depth);
		return false;
	}
	_dbg_assert_(pushBuffer);
	format_ = desc.format;
	mipLevels_ = desc.mipLevels;
	width_ = desc.width;
	height_ = desc.height;
	depth_ = desc.depth;
	vkTex_ = new VulkanTexture(vulkan_, desc.tag);
	VkFormat vulkanFormat = DataFormatToVulkan(format_);
	int usageBits = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
	if (mipLevels_ > (int)desc.initData.size()) {
		// Gonna have to generate some, which requires TRANSFER_SRC
		usageBits |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
	}

	VkComponentMapping r8AsAlpha[4] = { {VK_COMPONENT_SWIZZLE_ONE, VK_COMPONENT_SWIZZLE_ONE, VK_COMPONENT_SWIZZLE_ONE, VK_COMPONENT_SWIZZLE_R} };
	VkComponentMapping r8AsColor[4] = { {VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_ONE} };

	VkComponentMapping *swizzle = nullptr;
	switch (desc.swizzle) {
	case TextureSwizzle::R8_AS_ALPHA: swizzle = r8AsAlpha; break;
	case TextureSwizzle::R8_AS_GRAYSCALE: swizzle = r8AsColor; break;
	case TextureSwizzle::DEFAULT:
		break;
	}
	VulkanBarrierBatch barrier;
	if (!vkTex_->CreateDirect(width_, height_, 1, mipLevels_, vulkanFormat, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, usageBits, &barrier, swizzle)) {
		ERROR_LOG(Log::G3D, "Failed to create VulkanTexture: %dx%dx%d fmt %d, %d levels", width_, height_, depth_, (int)vulkanFormat, mipLevels_);
		return false;
	}
	barrier.Flush(cmd);
	VkImageLayout layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
	if (desc.initData.size()) {
		UpdateInternal(cmd, pushBuffer, desc.initData.data(), desc.initDataCallback, (int)desc.initData.size());
		// Generate the rest of the mips automatically.
		if ((int)desc.initData.size() < mipLevels_) {
			vkTex_->GenerateMips(cmd, (int)desc.initData.size(), false);
			layout = VK_IMAGE_LAYOUT_GENERAL;
		}
	}
	vkTex_->EndCreate(cmd, false, VK_PIPELINE_STAGE_TRANSFER_BIT, layout);
	return true;
}

void VKTexture::Update(VkCommandBuffer cmd, VulkanBarrierBatch *postBarriers, VulkanPushPool *pushBuffer, const uint8_t * const *data, TextureCallback initDataCallback, int numLevels) {
	// Before we can use UpdateInternal, we need to transition the image to the same state as after CreateDirect,
	// making it ready for writing.
	vkTex_->PrepareForTransferDst(cmd, numLevels);
	UpdateInternal(cmd, pushBuffer, data, initDataCallback, numLevels);
	vkTex_->RestoreAfterTransferDst(numLevels, postBarriers);
}

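// Copies each provided mip level into the push buffer and records buffer-to-image copies for it;
// dimensions are halved (rounding up) for every successive level.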
void VKTexture::UpdateInternal(VkCommandBuffer cmd, VulkanPushPool *pushBuffer, const uint8_t * const *data, TextureCallback initDataCallback, int numLevels) {
	int w = width_;
	int h = height_;
	int d = depth_;
	VkFormat vulkanFormat = DataFormatToVulkan(format_);
	int bpp = GetBpp(vulkanFormat);
	_dbg_assert_(bpp != 0);
	int bytesPerPixel = bpp / 8;
	TextureCopyBatch batch;
	batch.reserve(numLevels);
	for (int i = 0; i < numLevels; i++) {
		uint32_t offset;
		VkBuffer buf;
		size_t size = w * h * d * bytesPerPixel;
		uint8_t *dest = (uint8_t *)pushBuffer->Allocate(size, 16, &buf, &offset);
		if (initDataCallback) {
			_assert_(dest != nullptr);
			if (!initDataCallback(dest, data[i], w, h, d, w * bytesPerPixel, h * w * bytesPerPixel)) {
				memcpy(dest, data[i], size);
			}
		} else {
			memcpy(dest, data[i], size);
		}
		vkTex_->CopyBufferToMipLevel(cmd, &batch, i, w, h, 0, buf, offset, w);
		w = (w + 1) / 2;
		h = (h + 1) / 2;
		d = (d + 1) / 2;
	}
	vkTex_->FinishCopyBatch(cmd, &batch);
}

static DataFormat DataFormatFromVulkanDepth(VkFormat fmt) {
	switch (fmt) {
	case VK_FORMAT_D24_UNORM_S8_UINT:
		return DataFormat::D24_S8;
	case VK_FORMAT_D16_UNORM:
		return DataFormat::D16;
	case VK_FORMAT_D32_SFLOAT:
		return DataFormat::D32F;
	case VK_FORMAT_D32_SFLOAT_S8_UINT:
		return DataFormat::D32F_S8;
	case VK_FORMAT_D16_UNORM_S8_UINT:
		return DataFormat::D16_S8;
	default:
		break;
	}

	return DataFormat::UNDEFINED;
}

VKContext::VKContext(VulkanContext *vulkan, bool useRenderThread)
	: vulkan_(vulkan), renderManager_(vulkan, useRenderThread, frameTimeHistory_) {
	shaderLanguageDesc_.Init(GLSL_VULKAN);

	caps_.coordConvention = CoordConvention::Vulkan;
	caps_.setMaxFrameLatencySupported = true;
	caps_.anisoSupported = vulkan->GetDeviceFeatures().enabled.standard.samplerAnisotropy != 0;
	caps_.geometryShaderSupported = vulkan->GetDeviceFeatures().enabled.standard.geometryShader != 0;
	caps_.tesselationShaderSupported = vulkan->GetDeviceFeatures().enabled.standard.tessellationShader != 0;
	caps_.dualSourceBlend = vulkan->GetDeviceFeatures().enabled.standard.dualSrcBlend != 0;
	caps_.depthClampSupported = vulkan->GetDeviceFeatures().enabled.standard.depthClamp != 0;

	// Comment out these two to test geometry shader culling on any geometry shader-supporting hardware.
	caps_.clipDistanceSupported = vulkan->GetDeviceFeatures().enabled.standard.shaderClipDistance != 0;
	caps_.cullDistanceSupported = vulkan->GetDeviceFeatures().enabled.standard.shaderCullDistance != 0;

	caps_.framebufferBlitSupported = true;
	caps_.framebufferCopySupported = true;
	caps_.framebufferDepthBlitSupported = vulkan->GetDeviceInfo().canBlitToPreferredDepthStencilFormat;
	caps_.framebufferStencilBlitSupported = caps_.framebufferDepthBlitSupported;
	caps_.framebufferDepthCopySupported = true; // Will pretty much always be the case.
	caps_.framebufferSeparateDepthCopySupported = true; // Will pretty much always be the case.
	// This doesn't affect what depth/stencil format is actually used, see VulkanQueueRunner.
	caps_.preferredDepthBufferFormat = DataFormatFromVulkanDepth(vulkan->GetDeviceInfo().preferredDepthStencilFormat);
	caps_.texture3DSupported = true;
	caps_.textureDepthSupported = true;
	caps_.fragmentShaderInt32Supported = true;
	caps_.textureNPOTFullySupported = true;
	caps_.fragmentShaderDepthWriteSupported = true;
	caps_.fragmentShaderStencilWriteSupported = vulkan->Extensions().EXT_shader_stencil_export;
	caps_.blendMinMaxSupported = true;
	caps_.logicOpSupported = vulkan->GetDeviceFeatures().enabled.standard.logicOp != 0;
	caps_.multiViewSupported = vulkan->GetDeviceFeatures().enabled.multiview.multiview != 0;
	caps_.sampleRateShadingSupported = vulkan->GetDeviceFeatures().enabled.standard.sampleRateShading != 0;
	caps_.textureSwizzleSupported = true;

	// Note that it must also be enabled on the pipelines (which we do).
	caps_.provokingVertexLast = vulkan->GetDeviceFeatures().enabled.provokingVertex.provokingVertexLast;

	// Present mode stuff
	caps_.presentMaxInterval = 1;
	caps_.presentInstantModeChange = false; // TODO: Fix this with some work in VulkanContext
	caps_.presentModesSupported = (PresentMode)0;
	for (auto mode : vulkan->GetAvailablePresentModes()) {
		switch (mode) {
		case VK_PRESENT_MODE_FIFO_KHR: caps_.presentModesSupported |= PresentMode::FIFO; break;
		case VK_PRESENT_MODE_IMMEDIATE_KHR: caps_.presentModesSupported |= PresentMode::IMMEDIATE; break;
		case VK_PRESENT_MODE_MAILBOX_KHR: caps_.presentModesSupported |= PresentMode::MAILBOX; break;
		default: break; // Ignore any other modes.
		}
	}

	const auto &limits = vulkan->GetPhysicalDeviceProperties().properties.limits;

	auto deviceProps = vulkan->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDeviceIndex()).properties;

	switch (deviceProps.vendorID) {
	case VULKAN_VENDOR_AMD: caps_.vendor = GPUVendor::VENDOR_AMD; break;
	case VULKAN_VENDOR_ARM: caps_.vendor = GPUVendor::VENDOR_ARM; break;
	case VULKAN_VENDOR_IMGTEC: caps_.vendor = GPUVendor::VENDOR_IMGTEC; break;
	case VULKAN_VENDOR_NVIDIA: caps_.vendor = GPUVendor::VENDOR_NVIDIA; break;
	case VULKAN_VENDOR_QUALCOMM: caps_.vendor = GPUVendor::VENDOR_QUALCOMM; break;
	case VULKAN_VENDOR_INTEL: caps_.vendor = GPUVendor::VENDOR_INTEL; break;
	case VULKAN_VENDOR_APPLE: caps_.vendor = GPUVendor::VENDOR_APPLE; break;
	case VULKAN_VENDOR_MESA: caps_.vendor = GPUVendor::VENDOR_MESA; break;
	default:
		WARN_LOG(Log::G3D, "Unknown vendor ID %08x", deviceProps.vendorID);
		caps_.vendor = GPUVendor::VENDOR_UNKNOWN;
		break;
	}

	switch (caps_.vendor) {
	case GPUVendor::VENDOR_ARM:
	case GPUVendor::VENDOR_IMGTEC:
	case GPUVendor::VENDOR_QUALCOMM:
		caps_.isTilingGPU = true;
		break;
	default:
		caps_.isTilingGPU = false;
		break;
	}

	if (caps_.vendor == GPUVendor::VENDOR_IMGTEC) {
		// Enable some things that cut down pipeline counts but may have other costs.
		caps_.verySlowShaderCompiler = true;
	}

	// VkSampleCountFlagBits is arranged correctly for our purposes.
	// Only support MSAA levels that have support for all three of color, depth, stencil.

	bool multisampleAllowed = true;

	caps_.deviceID = deviceProps.deviceID;

	if (caps_.vendor == GPUVendor::VENDOR_QUALCOMM) {
		if (caps_.deviceID < 0x6000000) { // On sub 6xx series GPUs, disallow multisample.
			INFO_LOG(Log::G3D, "Multisampling was disabled due to old driver version (Adreno)");
			multisampleAllowed = false;
		}

		// Adreno 5xx devices, all known driver versions, fail to discard stencil when depth write is off.
		// See: https://github.com/hrydgard/ppsspp/pull/11684
		if (deviceProps.deviceID >= 0x05000000 && deviceProps.deviceID < 0x06000000) {
			if (deviceProps.driverVersion < 0x80180000) {
				bugs_.Infest(Bugs::NO_DEPTH_CANNOT_DISCARD_STENCIL_ADRENO);
			}
		}
		// Color write mask not masking write in certain scenarios with a depth test, see #10421.
		// Known still present on driver 0x80180000 and Adreno 5xx (possibly more.)
		// Known working on driver 0x801EA000 and Adreno 620.
		if (deviceProps.driverVersion < 0x801EA000 || deviceProps.deviceID < 0x06000000)
			bugs_.Infest(Bugs::COLORWRITEMASK_BROKEN_WITH_DEPTHTEST);

		// Trying to follow all the rules in https://registry.khronos.org/vulkan/specs/1.3/html/vkspec.html#synchronization-pipeline-barriers-subpass-self-dependencies
		// and https://registry.khronos.org/vulkan/specs/1.3/html/vkspec.html#renderpass-feedbackloop, but still it doesn't
		// quite work - artifacts on triangle boundaries on Adreno.
		bugs_.Infest(Bugs::SUBPASS_FEEDBACK_BROKEN);
	} else if (caps_.vendor == GPUVendor::VENDOR_AMD) {
		// See issue #10074, and also #10065 (AMD) and #10109 for the choice of the driver version to check for.
		if (deviceProps.driverVersion < 0x00407000) {
			bugs_.Infest(Bugs::DUAL_SOURCE_BLENDING_BROKEN);
		}
	} else if (caps_.vendor == GPUVendor::VENDOR_INTEL) {
		// Workaround for Intel driver bug. TODO: Re-enable after some driver version
		bugs_.Infest(Bugs::DUAL_SOURCE_BLENDING_BROKEN);
	} else if (caps_.vendor == GPUVendor::VENDOR_ARM) {
		// Really old Vulkan drivers for Mali didn't have proper versions. We try to detect that (can't be 100% but pretty good).
		bool isOldVersion = IsHashMaliDriverVersion(deviceProps);

		int majorVersion = VK_API_VERSION_MAJOR(deviceProps.driverVersion);

		// These GPUs (up to some certain hardware version?) have a bug where draws where gl_Position.w == .z
		// corrupt the depth buffer. This is easily worked around by simply scaling Z down a tiny bit when this case
		// is detected. See: https://github.com/hrydgard/ppsspp/issues/11937
		bugs_.Infest(Bugs::EQUAL_WZ_CORRUPTS_DEPTH);

		// Nearly identical to the Adreno bug, see #13833 (Midnight Club map broken) and other issues.
		// It has the additional caveat that combining depth writes with NEVER depth tests crashes the driver.
		// Reported fixed in major version 40 - let's add a check once confirmed.
		bugs_.Infest(Bugs::NO_DEPTH_CANNOT_DISCARD_STENCIL_MALI);

		// This started in driver 31 or 32, fixed in 40 - let's add a check once confirmed.
		if (majorVersion >= 32) {
			bugs_.Infest(Bugs::MALI_CONSTANT_LOAD_BUG); // See issue #15661
		}

		// Older ARM devices have very slow geometry shaders, not worth using. At least before 15.
		// Also seen to cause weird issues on 18, so let's lump it in.
		if (majorVersion <= 18 || isOldVersion) {
			bugs_.Infest(Bugs::GEOMETRY_SHADERS_SLOW_OR_BROKEN);
		}

		// Attempt to workaround #17386
		if (isOldVersion) {
			if (!strcmp(deviceProps.deviceName, "Mali-T880") ||
				!strcmp(deviceProps.deviceName, "Mali-T860") ||
				!strcmp(deviceProps.deviceName, "Mali-T830")) {
				bugs_.Infest(Bugs::UNIFORM_INDEXING_BROKEN);
			}
		}

		if (isOldVersion) {
			// Very rough heuristic.
			multisampleAllowed = false;
		}
	} else if (caps_.vendor == GPUVendor::VENDOR_IMGTEC) {
		// Not sure about driver versions, so let's just ban, impact is tiny.
		bugs_.Infest(Bugs::PVR_BAD_16BIT_TEXFORMATS);
	}

	if (!vulkan->Extensions().KHR_depth_stencil_resolve) {
		INFO_LOG(Log::G3D, "KHR_depth_stencil_resolve not supported, disabling multisampling");
		multisampleAllowed = false;
	}

	if (!vulkan->Extensions().KHR_create_renderpass2) {
		WARN_LOG(Log::G3D, "KHR_create_renderpass2 not supported, disabling multisampling");
		multisampleAllowed = false;
	} else {
		_dbg_assert_(vkCreateRenderPass2 != nullptr);
	}

	// We limit multisampling functionality to reasonably recent and known-good tiling GPUs.
	if (multisampleAllowed) {
		// Check for depth stencil resolve. Without it, depth textures won't work, and we don't want that mess
		// of compatibility reports, so we'll just disable multisampling in this case for now.
		// There are potential workarounds for devices that don't support it, but those are nearly non-existent now.
		const auto &resolveProperties = vulkan->GetPhysicalDeviceProperties().depthStencilResolve;
		if (((resolveProperties.supportedDepthResolveModes & resolveProperties.supportedStencilResolveModes) & VK_RESOLVE_MODE_SAMPLE_ZERO_BIT) != 0) {
			caps_.multiSampleLevelsMask = (limits.framebufferColorSampleCounts & limits.framebufferDepthSampleCounts & limits.framebufferStencilSampleCounts);
			INFO_LOG(Log::G3D, "Multisample levels mask: %d", caps_.multiSampleLevelsMask);
		} else {
			INFO_LOG(Log::G3D, "Not enough depth/stencil resolve modes supported, disabling multisampling. Color: %d Depth: %d Stencil: %d",
				limits.framebufferColorSampleCounts, limits.framebufferDepthSampleCounts, limits.framebufferStencilSampleCounts);
			caps_.multiSampleLevelsMask = 1;
		}
	} else {
		caps_.multiSampleLevelsMask = 1;
	}

	// Vulkan can support this through input attachments and various extensions, but not worth
	// the trouble.
	caps_.framebufferFetchSupported = false;

	device_ = vulkan->GetDevice();

	VkBufferUsageFlags usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
	push_ = new VulkanPushPool(vulkan_, "pushBuffer", 4 * 1024 * 1024, usage);

	// binding 0 - uniform data
	// binding 1 - combined sampler/image 0
	// binding 2 - combined sampler/image 1
	// ...etc
	BindingType bindings[MAX_BOUND_TEXTURES + 1];
	bindings[0] = BindingType::UNIFORM_BUFFER_DYNAMIC_ALL;
	for (int i = 0; i < MAX_BOUND_TEXTURES; ++i) {
		bindings[1 + i] = BindingType::COMBINED_IMAGE_SAMPLER;
	}
	pipelineLayout_ = renderManager_.CreatePipelineLayout(bindings, ARRAY_SIZE(bindings), caps_.geometryShaderSupported, "thin3d_layout");

	VkPipelineCacheCreateInfo pc{ VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO };
	VkResult res = vkCreatePipelineCache(vulkan_->GetDevice(), &pc, nullptr, &pipelineCache_);
	_assert_(VK_SUCCESS == res);
}

VKContext::~VKContext() {
	DestroyPresets();

	delete nullTexture_;
	push_->Destroy();
	delete push_;
	renderManager_.DestroyPipelineLayout(pipelineLayout_);
	vulkan_->Delete().QueueDeletePipelineCache(pipelineCache_);
}

void VKContext::BeginFrame(DebugFlags debugFlags) {
	renderManager_.BeginFrame(debugFlags & DebugFlags::PROFILE_TIMESTAMPS, debugFlags & DebugFlags::PROFILE_SCOPES);
	push_->BeginFrame();
}

void VKContext::EndFrame() {
	// Do all the work to submit the command buffers etc.
	renderManager_.Finish();
	// Unbind stuff, to avoid accidentally relying on it across frames (and provide some protection against forgotten unbinds of deleted things).
	Invalidate(InvalidationFlags::CACHED_RENDER_STATE);
}

void VKContext::Present(PresentMode presentMode, int vblanks) {
	if (presentMode == PresentMode::FIFO) {
		_dbg_assert_(vblanks == 0 || vblanks == 1);
	}
	renderManager_.Present();
	frameCount_++;
}

void VKContext::Invalidate(InvalidationFlags flags) {
	if (flags & InvalidationFlags::CACHED_RENDER_STATE) {
		curPipeline_ = nullptr;

		for (auto &view : boundImageView_) {
			view = VK_NULL_HANDLE;
		}
		for (auto &sampler : boundSamplers_) {
			sampler = nullptr;
		}
		for (auto &texture : boundTextures_) {
			texture = nullptr;
		}
	}
}

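// Fills out the descriptor contents to match the pipeline layout created in the constructor:
// binding 0 is the dynamic uniform buffer, bindings 1..MAX_BOUND_TEXTURES are combined image/samplers.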
void VKContext::BindDescriptors(VkBuffer buf, PackedDescriptor descriptors[4]) {
	descriptors[0].buffer.buffer = buf;
	descriptors[0].buffer.offset = 0; // dynamic
	descriptors[0].buffer.range = curPipeline_->GetUBOSize();

	for (int i = 0; i < MAX_BOUND_TEXTURES; ++i) {
		VkImageView view;
		VkSampler sampler;
		if (boundTextures_[i]) {
			view = (boundTextureFlags_[i] & TextureBindFlags::VULKAN_BIND_ARRAY) ? boundTextures_[i]->GetImageArrayView() : boundTextures_[i]->GetImageView();
		} else {
			view = boundImageView_[i];
		}
		sampler = boundSamplers_[i] ? boundSamplers_[i]->GetSampler() : VK_NULL_HANDLE;

		if (view && sampler) {
			descriptors[i + 1].image.view = view;
			descriptors[i + 1].image.sampler = sampler;
		} else {
			descriptors[i + 1].image.view = VK_NULL_HANDLE;
			descriptors[i + 1].image.sampler = VK_NULL_HANDLE;
		}
	}
}

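// Builds a VKRGraphicsPipelineDesc from the thin3d PipelineDesc; the render manager owns the
// actual VkPipeline creation.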
Pipeline *VKContext::CreateGraphicsPipeline(const PipelineDesc &desc, const char *tag) {
	VKInputLayout *input = (VKInputLayout *)desc.inputLayout;
	VKBlendState *blend = (VKBlendState *)desc.blend;
	VKDepthStencilState *depth = (VKDepthStencilState *)desc.depthStencil;
	VKRasterState *raster = (VKRasterState *)desc.raster;

	PipelineFlags pipelineFlags = (PipelineFlags)0;
	if (depth->info.depthTestEnable || depth->info.stencilTestEnable) {
		pipelineFlags |= PipelineFlags::USES_DEPTH_STENCIL;
	}
	// TODO: We need code to set USES_BLEND_CONSTANT here too, if we're ever gonna use those in thin3d code.

	VKPipeline *pipeline = new VKPipeline(vulkan_, desc.uniformDesc ? desc.uniformDesc->uniformBufferSize : 16 * sizeof(float), pipelineFlags, tag);

	VKRGraphicsPipelineDesc &gDesc = *pipeline->vkrDesc;

	std::vector<VkPipelineShaderStageCreateInfo> stages;
	stages.resize(desc.shaders.size());

	for (auto &iter : desc.shaders) {
		VKShaderModule *vkshader = (VKShaderModule *)iter;
		vkshader->AddRef();
		pipeline->deps.push_back(vkshader);
		if (vkshader->GetStage() == ShaderStage::Vertex) {
			gDesc.vertexShader = vkshader->Get();
		} else if (vkshader->GetStage() == ShaderStage::Fragment) {
			gDesc.fragmentShader = vkshader->Get();
		} else {
			ERROR_LOG(Log::G3D, "Bad stage");
			delete pipeline;
			return nullptr;
		}
	}

	_dbg_assert_(input);
	_dbg_assert_((int)input->attributes.size() == (int)input->visc.vertexAttributeDescriptionCount);

	pipeline->stride = input->binding.stride;
	gDesc.ibd = input->binding;
	for (size_t i = 0; i < input->attributes.size(); i++) {
		gDesc.attrs[i] = input->attributes[i];
	}
	gDesc.vis.vertexAttributeDescriptionCount = input->visc.vertexAttributeDescriptionCount;
	gDesc.vis.vertexBindingDescriptionCount = input->visc.vertexBindingDescriptionCount;
	gDesc.vis.pVertexBindingDescriptions = &gDesc.ibd;
	gDesc.vis.pVertexAttributeDescriptions = gDesc.attrs;

	gDesc.blend0 = blend->attachments[0];
	gDesc.cbs = blend->info;
	gDesc.cbs.pAttachments = &gDesc.blend0;

	gDesc.dss = depth->info;

	// Copy bindings from input layout.
	gDesc.topology = primToVK[(int)desc.prim];

	// We treat the three stencil states as a unit in other places, so let's do that here too.
	const VkDynamicState dynamics[] = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR, VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, VK_DYNAMIC_STATE_STENCIL_REFERENCE, VK_DYNAMIC_STATE_STENCIL_WRITE_MASK };
	gDesc.ds.dynamicStateCount = depth->info.stencilTestEnable ? ARRAY_SIZE(dynamics) : 2;
	for (size_t i = 0; i < gDesc.ds.dynamicStateCount; i++) {
		gDesc.dynamicStates[i] = dynamics[i];
	}
	gDesc.ds.pDynamicStates = gDesc.dynamicStates;

	gDesc.views.viewportCount = 1;
	gDesc.views.scissorCount = 1;
	gDesc.views.pViewports = nullptr; // dynamic
	gDesc.views.pScissors = nullptr; // dynamic

	gDesc.pipelineLayout = pipelineLayout_;

	raster->ToVulkan(&gDesc.rs);

	if (renderManager_.GetVulkanContext()->GetDeviceFeatures().enabled.provokingVertex.provokingVertexLast) {
		ChainStruct(gDesc.rs, &gDesc.rs_provoking);
		gDesc.rs_provoking.provokingVertexMode = VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT;
	}

	pipeline->pipeline = renderManager_.CreateGraphicsPipeline(&gDesc, pipelineFlags, 1 << (size_t)RenderPassType::BACKBUFFER, VK_SAMPLE_COUNT_1_BIT, false, tag ? tag : "thin3d");

	if (desc.uniformDesc) {
		pipeline->dynamicUniformSize = (int)desc.uniformDesc->uniformBufferSize;
	}
	if (depth->info.stencilTestEnable) {
		pipeline->usesStencil = true;
	}
	return pipeline;
}

void VKContext::SetScissorRect(int left, int top, int width, int height) {
	renderManager_.SetScissor(left, top, width, height);
}

void VKContext::SetViewport(const Viewport &viewport) {
	// Ignore all viewports beyond the first.
	VkViewport vkViewport;
	vkViewport.x = viewport.TopLeftX;
	vkViewport.y = viewport.TopLeftY;
	vkViewport.width = viewport.Width;
	vkViewport.height = viewport.Height;
	vkViewport.minDepth = viewport.MinDepth;
	vkViewport.maxDepth = viewport.MaxDepth;
	renderManager_.SetViewport(vkViewport);
}

void VKContext::SetBlendFactor(float color[4]) {
	uint32_t col = Float4ToUint8x4(color);
	renderManager_.SetBlendFactor(col);
}

void VKContext::SetStencilParams(uint8_t refValue, uint8_t writeMask, uint8_t compareMask) {
	if (curPipeline_->usesStencil)
		renderManager_.SetStencilParams(writeMask, compareMask, refValue);
	stencilRef_ = refValue;
	stencilWriteMask_ = writeMask;
	stencilCompareMask_ = compareMask;
}

InputLayout *VKContext::CreateInputLayout(const InputLayoutDesc &desc) {
	VKInputLayout *vl = new VKInputLayout();
	vl->visc = { VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO };
	vl->visc.flags = 0;
	vl->visc.vertexBindingDescriptionCount = 1;
	vl->visc.vertexAttributeDescriptionCount = (uint32_t)desc.attributes.size();
	vl->attributes.resize(vl->visc.vertexAttributeDescriptionCount);
	vl->visc.pVertexBindingDescriptions = &vl->binding;
	vl->visc.pVertexAttributeDescriptions = vl->attributes.data();
	for (size_t i = 0; i < desc.attributes.size(); i++) {
		vl->attributes[i].binding = 0;
		vl->attributes[i].format = DataFormatToVulkan(desc.attributes[i].format);
		vl->attributes[i].location = desc.attributes[i].location;
		vl->attributes[i].offset = desc.attributes[i].offset;
	}
	vl->binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
	vl->binding.binding = 0;
	vl->binding.stride = desc.stride;
	return vl;
}

Texture *VKContext::CreateTexture(const TextureDesc &desc) {
	VkCommandBuffer initCmd = renderManager_.GetInitCmd();
	if (!push_ || !initCmd) {
		// Too early! Fail.
		ERROR_LOG(Log::G3D, "Can't create textures before the first frame has started.");
		return nullptr;
	}
	VKTexture *tex = new VKTexture(vulkan_, desc);
	if (tex->Create(initCmd, &renderManager_.PostInitBarrier(), push_, desc)) {
		return tex;
	} else {
		ERROR_LOG(Log::G3D, "Failed to create texture");
		tex->Release();
		return nullptr;
	}
}

void VKContext::UpdateTextureLevels(Texture *texture, const uint8_t **data, TextureCallback initDataCallback, int numLevels) {
	VkCommandBuffer initCmd = renderManager_.GetInitCmd();
	if (!push_ || !initCmd) {
		// Too early! Fail.
		ERROR_LOG(Log::G3D, "Can't update textures before the first frame has started.");
		return;
	}

	VKTexture *tex = (VKTexture *)texture;

	_dbg_assert_(numLevels <= tex->NumLevels());
	tex->Update(initCmd, &renderManager_.PostInitBarrier(), push_, data, initDataCallback, numLevels);
}

static inline void CopySide(VkStencilOpState &dest, const StencilSetup &src) {
1359
dest.compareOp = compToVK[(int)src.compareOp];
1360
dest.failOp = stencilOpToVK[(int)src.failOp];
1361
dest.passOp = stencilOpToVK[(int)src.passOp];
1362
dest.depthFailOp = stencilOpToVK[(int)src.depthFailOp];
1363
}
1364
1365
DepthStencilState *VKContext::CreateDepthStencilState(const DepthStencilStateDesc &desc) {
1366
VKDepthStencilState *ds = new VKDepthStencilState();
1367
ds->info.depthCompareOp = compToVK[(int)desc.depthCompare];
1368
ds->info.depthTestEnable = desc.depthTestEnabled;
1369
ds->info.depthWriteEnable = desc.depthWriteEnabled;
1370
ds->info.stencilTestEnable = desc.stencilEnabled;
1371
ds->info.depthBoundsTestEnable = false;
1372
if (ds->info.stencilTestEnable) {
1373
CopySide(ds->info.front, desc.stencil);
1374
CopySide(ds->info.back, desc.stencil);
1375
}
1376
return ds;
1377
}
1378
1379
BlendState *VKContext::CreateBlendState(const BlendStateDesc &desc) {
	VKBlendState *bs = new VKBlendState();
	bs->info.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
	bs->info.attachmentCount = 1;
	bs->info.logicOp = logicOpToVK[(int)desc.logicOp];
	bs->info.logicOpEnable = desc.logicEnabled;
	bs->attachments.resize(1);
	bs->attachments[0].blendEnable = desc.enabled;
	bs->attachments[0].colorBlendOp = blendEqToVk[(int)desc.eqCol];
	bs->attachments[0].alphaBlendOp = blendEqToVk[(int)desc.eqAlpha];
	bs->attachments[0].colorWriteMask = desc.colorMask;
	bs->attachments[0].dstAlphaBlendFactor = blendFactorToVk[(int)desc.dstAlpha];
	bs->attachments[0].dstColorBlendFactor = blendFactorToVk[(int)desc.dstCol];
	bs->attachments[0].srcAlphaBlendFactor = blendFactorToVk[(int)desc.srcAlpha];
	bs->attachments[0].srcColorBlendFactor = blendFactorToVk[(int)desc.srcCol];
	bs->info.pAttachments = bs->attachments.data();
	return bs;
}

// Very simplistic buffer that will simply copy its contents into our "pushbuffer" when it's time to draw,
// to avoid synchronization issues.
class VKBuffer : public Buffer {
public:
	VKBuffer(size_t size) : dataSize_(size) {
		data_ = new uint8_t[size];
	}
	~VKBuffer() {
		delete[] data_;
	}

	size_t GetSize() const { return dataSize_; }
	const uint8_t *GetData() const { return data_; }

	uint8_t *data_;
	size_t dataSize_;
};
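
// Since the contents are re-pushed into the per-frame push buffer on every draw anyway,
// the usage flags passed to CreateBuffer don't matter and are ignored.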
Buffer *VKContext::CreateBuffer(size_t size, uint32_t usageFlags) {
	return new VKBuffer(size);
}

void VKContext::UpdateBuffer(Buffer *buffer, const uint8_t *data, size_t offset, size_t size, UpdateBufferFlags flags) {
	VKBuffer *buf = (VKBuffer *)buffer;
	memcpy(buf->data_ + offset, data, size);
}
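
// Slots bound with a null texture fall back to the null texture's image view, so the
// descriptor set written in BindDescriptors always contains valid views.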
void VKContext::BindTextures(int start, int count, Texture **textures, TextureBindFlags flags) {
	_assert_(start + count <= MAX_BOUND_TEXTURES);
	for (int i = start; i < start + count; i++) {
		_dbg_assert_(i >= 0 && i < MAX_BOUND_TEXTURES);
		boundTextures_[i] = static_cast<VKTexture *>(textures[i - start]);
		boundTextureFlags_[i] = flags;
		if (boundTextures_[i]) {
			// If a texture is bound, we set these up in BindDescriptors too.
			// But we might need to set the view here anyway so it can be queried using GetNativeObject.
			if (flags & TextureBindFlags::VULKAN_BIND_ARRAY) {
				boundImageView_[i] = boundTextures_[i]->GetImageArrayView();
			} else {
				boundImageView_[i] = boundTextures_[i]->GetImageView();
			}
		} else {
			if (flags & TextureBindFlags::VULKAN_BIND_ARRAY) {
				boundImageView_[i] = GetNullTexture()->GetImageArrayView();
			} else {
				boundImageView_[i] = GetNullTexture()->GetImageView();
			}
		}
	}
}

void VKContext::BindNativeTexture(int sampler, void *nativeTexture) {
	_dbg_assert_(sampler >= 0 && sampler < MAX_BOUND_TEXTURES);
	boundTextures_[sampler] = nullptr;
	boundImageView_[sampler] = (VkImageView)nativeTexture;
}
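
// Shader compilation happens at creation time. On failure, the source is logged with line
// numbers to make the reported compile errors easier to locate.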
ShaderModule *VKContext::CreateShaderModule(ShaderStage stage, ShaderLanguage language, const uint8_t *data, size_t size, const char *tag) {
	VKShaderModule *shader = new VKShaderModule(stage, tag);
	if (shader->Compile(vulkan_, data, size)) {
		return shader;
	} else {
		ERROR_LOG(Log::G3D, "Failed to compile shader %s:\n%s", tag, (const char *)LineNumberString((const char *)data).c_str());
		shader->Release();
		return nullptr;
	}
}

void VKContext::UpdateDynamicUniformBuffer(const void *ub, size_t size) {
	curPipeline_->SetDynamicUniformData(ub, size);
}

void VKContext::ApplyDynamicState() {
	// TODO: blend constants, stencil, viewports should be here, after bindpipeline..
	if (curPipeline_->usesStencil) {
		renderManager_.SetStencilParams(stencilWriteMask_, stencilCompareMask_, stencilRef_);
	}
}
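
// The draw functions below all follow the same pattern: push the uniform data and the
// vertex (and index) data into the per-frame push buffer, bind the current pipeline,
// apply dynamic state, write a packed descriptor set, then queue the draw through the
// render manager.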
void VKContext::Draw(int vertexCount, int offset) {
	VKBuffer *vbuf = curVBuffer_;

	VkBuffer vulkanVbuf;
	VkBuffer vulkanUBObuf;
	uint32_t ubo_offset = (uint32_t)curPipeline_->PushUBO(push_, vulkan_, &vulkanUBObuf);
	size_t vbBindOffset = push_->Push(vbuf->GetData(), vbuf->GetSize(), 4, &vulkanVbuf);

	BindCurrentPipeline();
	ApplyDynamicState();
	int descSetIndex;
	PackedDescriptor *descriptors = renderManager_.PushDescriptorSet(4, &descSetIndex);
	BindDescriptors(vulkanUBObuf, descriptors);
	renderManager_.Draw(descSetIndex, 1, &ubo_offset, vulkanVbuf, (int)vbBindOffset + curVBufferOffset_, vertexCount, offset);
}

void VKContext::DrawIndexed(int vertexCount, int offset) {
	VKBuffer *ibuf = curIBuffer_;
	VKBuffer *vbuf = curVBuffer_;

	VkBuffer vulkanVbuf, vulkanIbuf, vulkanUBObuf;
	uint32_t ubo_offset = (uint32_t)curPipeline_->PushUBO(push_, vulkan_, &vulkanUBObuf);
	size_t vbBindOffset = push_->Push(vbuf->GetData(), vbuf->GetSize(), 4, &vulkanVbuf);
	size_t ibBindOffset = push_->Push(ibuf->GetData(), ibuf->GetSize(), 4, &vulkanIbuf);

	BindCurrentPipeline();
	ApplyDynamicState();
	int descSetIndex;
	PackedDescriptor *descriptors = renderManager_.PushDescriptorSet(4, &descSetIndex);
	BindDescriptors(vulkanUBObuf, descriptors);
	renderManager_.DrawIndexed(descSetIndex, 1, &ubo_offset, vulkanVbuf, (int)vbBindOffset + curVBufferOffset_, vulkanIbuf, (int)ibBindOffset + offset * sizeof(uint32_t), vertexCount, 1);
}

void VKContext::DrawUP(const void *vdata, int vertexCount) {
	_dbg_assert_(vertexCount >= 0);
	if (vertexCount <= 0) {
		return;
	}

	VkBuffer vulkanVbuf, vulkanUBObuf;
	size_t dataSize = vertexCount * curPipeline_->stride;
	uint32_t vbBindOffset;
	uint8_t *dataPtr = push_->Allocate(dataSize, 4, &vulkanVbuf, &vbBindOffset);
	_assert_(dataPtr != nullptr);
	memcpy(dataPtr, vdata, dataSize);

	uint32_t ubo_offset = (uint32_t)curPipeline_->PushUBO(push_, vulkan_, &vulkanUBObuf);

	BindCurrentPipeline();
	ApplyDynamicState();
	int descSetIndex;
	PackedDescriptor *descriptors = renderManager_.PushDescriptorSet(4, &descSetIndex);
	BindDescriptors(vulkanUBObuf, descriptors);
	renderManager_.Draw(descSetIndex, 1, &ubo_offset, vulkanVbuf, (int)vbBindOffset, vertexCount);
}
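
// The "UP" (user pointer) variants upload vertex and index data directly from caller
// memory on each call; index data is 16-bit (u16).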
void VKContext::DrawIndexedUP(const void *vdata, int vertexCount, const void *idata, int indexCount) {
	_dbg_assert_(vertexCount >= 0);
	_dbg_assert_(indexCount >= 0);
	if (vertexCount <= 0 || indexCount <= 0) {
		return;
	}

	VkBuffer vulkanVbuf, vulkanIbuf, vulkanUBObuf;
	size_t vdataSize = vertexCount * curPipeline_->stride;
	uint32_t vbBindOffset;
	uint8_t *vdataPtr = push_->Allocate(vdataSize, 4, &vulkanVbuf, &vbBindOffset);
	_assert_(vdataPtr != nullptr);
	memcpy(vdataPtr, vdata, vdataSize);

	size_t idataSize = indexCount * sizeof(u16);
	uint32_t ibBindOffset;
	uint8_t *idataPtr = push_->Allocate(idataSize, 4, &vulkanIbuf, &ibBindOffset);
	_assert_(idataPtr != nullptr);
	memcpy(idataPtr, idata, idataSize);

	uint32_t ubo_offset = (uint32_t)curPipeline_->PushUBO(push_, vulkan_, &vulkanUBObuf);

	BindCurrentPipeline();
	ApplyDynamicState();
	int descSetIndex;
	PackedDescriptor *descriptors = renderManager_.PushDescriptorSet(4, &descSetIndex);
	BindDescriptors(vulkanUBObuf, descriptors);
	renderManager_.DrawIndexed(descSetIndex, 1, &ubo_offset, vulkanVbuf, (int)vbBindOffset, vulkanIbuf, (int)ibBindOffset, indexCount, 1);
}
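
// Batched variant: all the draws share a single vertex/index upload and uniform block,
// while each individual draw can switch pipeline, texture, sampler and scissor rectangle.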
void VKContext::DrawIndexedClippedBatchUP(const void *vdata, int vertexCount, const void *idata, int indexCount, Slice<ClippedDraw> draws, const void *ub, size_t ubSize) {
	_dbg_assert_(vertexCount >= 0);
	_dbg_assert_(indexCount >= 0);
	if (vertexCount <= 0 || indexCount <= 0 || draws.is_empty()) {
		return;
	}

	curPipeline_ = (VKPipeline *)draws[0].pipeline;

	VkBuffer vulkanVbuf, vulkanIbuf, vulkanUBObuf;
	size_t vdataSize = vertexCount * curPipeline_->stride;
	uint32_t vbBindOffset;
	uint8_t *vdataPtr = push_->Allocate(vdataSize, 4, &vulkanVbuf, &vbBindOffset);
	_assert_(vdataPtr != nullptr);
	memcpy(vdataPtr, vdata, vdataSize);

	constexpr int indexSize = sizeof(u16);

	size_t idataSize = indexCount * indexSize;
	uint32_t ibBindOffset;
	uint8_t *idataPtr = push_->Allocate(idataSize, 4, &vulkanIbuf, &ibBindOffset);
	_assert_(idataPtr != nullptr);
	memcpy(idataPtr, idata, idataSize);

	curPipeline_->SetDynamicUniformData(ub, ubSize);

	uint32_t ubo_offset = (uint32_t)curPipeline_->PushUBO(push_, vulkan_, &vulkanUBObuf);

	BindCurrentPipeline();
	ApplyDynamicState();

	for (auto &draw : draws) {
		if (draw.pipeline != curPipeline_) {
			VKPipeline *vkPipe = (VKPipeline *)draw.pipeline;
			renderManager_.BindPipeline(vkPipe->pipeline, vkPipe->flags, pipelineLayout_);
			curPipeline_ = (VKPipeline *)draw.pipeline;
			curPipeline_->SetDynamicUniformData(ub, ubSize);
		}
		// TODO: Dirty-check these.
		if (draw.bindTexture) {
			BindTexture(0, draw.bindTexture);
		} else if (draw.bindFramebufferAsTex) {
			BindFramebufferAsTexture(draw.bindFramebufferAsTex, 0, draw.aspect, 0);
		} else if (draw.bindNativeTexture) {
			BindNativeTexture(0, draw.bindNativeTexture);
		}
		Draw::SamplerState *sstate = draw.samplerState;
		BindSamplerStates(0, 1, &sstate);
		int descSetIndex;
		PackedDescriptor *descriptors = renderManager_.PushDescriptorSet(4, &descSetIndex);
		BindDescriptors(vulkanUBObuf, descriptors);
		renderManager_.SetScissor(draw.clipx, draw.clipy, draw.clipw, draw.cliph);
		renderManager_.DrawIndexed(descSetIndex, 1, &ubo_offset, vulkanVbuf, (int)vbBindOffset, vulkanIbuf,
			(int)ibBindOffset + draw.indexOffset * indexSize, draw.indexCount, 1);
	}
}

void VKContext::BindCurrentPipeline() {
	renderManager_.BindPipeline(curPipeline_->pipeline, curPipeline_->flags, pipelineLayout_);
}
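
// Clears the currently bound render target; the aspect bits select which of color, depth
// and stencil get cleared.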
void VKContext::Clear(Aspect aspects, uint32_t colorval, float depthVal, int stencilVal) {
	int mask = 0;
	if (aspects & Aspect::COLOR_BIT)
		mask |= VK_IMAGE_ASPECT_COLOR_BIT;
	if (aspects & Aspect::DEPTH_BIT)
		mask |= VK_IMAGE_ASPECT_DEPTH_BIT;
	if (aspects & Aspect::STENCIL_BIT)
		mask |= VK_IMAGE_ASPECT_STENCIL_BIT;
	renderManager_.Clear(colorval, depthVal, stencilVal, mask);
}

DrawContext *T3DCreateVulkanContext(VulkanContext *vulkan, bool useRenderThread) {
	return new VKContext(vulkan, useRenderThread);
}

void AddFeature(std::vector<std::string> &features, const char *name, VkBool32 available, VkBool32 enabled) {
	char buf[512];
	snprintf(buf, sizeof(buf), "%s: Available: %d Enabled: %d", name, (int)available, (int)enabled);
	features.push_back(buf);
}

std::vector<std::string> VKContext::GetFeatureList() const {
	const VkPhysicalDeviceFeatures &available = vulkan_->GetDeviceFeatures().available.standard;
	const VkPhysicalDeviceFeatures &enabled = vulkan_->GetDeviceFeatures().enabled.standard;

	std::vector<std::string> features;
	AddFeature(features, "dualSrcBlend", available.dualSrcBlend, enabled.dualSrcBlend);
	AddFeature(features, "logicOp", available.logicOp, enabled.logicOp);
	AddFeature(features, "geometryShader", available.geometryShader, enabled.geometryShader);
	AddFeature(features, "depthBounds", available.depthBounds, enabled.depthBounds);
	AddFeature(features, "depthClamp", available.depthClamp, enabled.depthClamp);
	AddFeature(features, "pipelineStatisticsQuery", available.pipelineStatisticsQuery, enabled.pipelineStatisticsQuery);
	AddFeature(features, "samplerAnisotropy", available.samplerAnisotropy, enabled.samplerAnisotropy);
	AddFeature(features, "textureCompressionBC", available.textureCompressionBC, enabled.textureCompressionBC);
	AddFeature(features, "textureCompressionETC2", available.textureCompressionETC2, enabled.textureCompressionETC2);
	AddFeature(features, "textureCompressionASTC_LDR", available.textureCompressionASTC_LDR, enabled.textureCompressionASTC_LDR);
	AddFeature(features, "shaderClipDistance", available.shaderClipDistance, enabled.shaderClipDistance);
	AddFeature(features, "shaderCullDistance", available.shaderCullDistance, enabled.shaderCullDistance);
	AddFeature(features, "occlusionQueryPrecise", available.occlusionQueryPrecise, enabled.occlusionQueryPrecise);
	AddFeature(features, "multiDrawIndirect", available.multiDrawIndirect, enabled.multiDrawIndirect);
	AddFeature(features, "robustBufferAccess", available.robustBufferAccess, enabled.robustBufferAccess);
	AddFeature(features, "fullDrawIndexUint32", available.fullDrawIndexUint32, enabled.fullDrawIndexUint32);
	AddFeature(features, "fragmentStoresAndAtomics", available.fragmentStoresAndAtomics, enabled.fragmentStoresAndAtomics);
	AddFeature(features, "shaderInt16", available.shaderInt16, enabled.shaderInt16);

	AddFeature(features, "multiview", vulkan_->GetDeviceFeatures().available.multiview.multiview, vulkan_->GetDeviceFeatures().enabled.multiview.multiview);
	AddFeature(features, "multiviewGeometryShader", vulkan_->GetDeviceFeatures().available.multiview.multiviewGeometryShader, vulkan_->GetDeviceFeatures().enabled.multiview.multiviewGeometryShader);
	AddFeature(features, "presentId", vulkan_->GetDeviceFeatures().available.presentId.presentId, vulkan_->GetDeviceFeatures().enabled.presentId.presentId);
	AddFeature(features, "presentWait", vulkan_->GetDeviceFeatures().available.presentWait.presentWait, vulkan_->GetDeviceFeatures().enabled.presentWait.presentWait);
	AddFeature(features, "provokingVertexLast", vulkan_->GetDeviceFeatures().available.provokingVertex.provokingVertexLast, vulkan_->GetDeviceFeatures().enabled.provokingVertex.provokingVertexLast);

	features.emplace_back(std::string("Preferred depth buffer format: ") + VulkanFormatToString(vulkan_->GetDeviceInfo().preferredDepthStencilFormat));

	return features;
}

std::vector<std::string> VKContext::GetExtensionList(bool device, bool enabledOnly) const {
	std::vector<std::string> extensions;
	if (enabledOnly) {
		const auto& enabled = (device ? vulkan_->GetDeviceExtensionsEnabled() : vulkan_->GetInstanceExtensionsEnabled());
		extensions.reserve(enabled.size());
		for (auto &iter : enabled) {
			extensions.push_back(iter);
		}
	} else {
		const auto& available = (device ? vulkan_->GetDeviceExtensionsAvailable() : vulkan_->GetInstanceExtensionsAvailable());
		extensions.reserve(available.size());
		for (auto &iter : available) {
			extensions.push_back(iter.extensionName);
		}
	}
	return extensions;
}

uint32_t VKContext::GetDataFormatSupport(DataFormat fmt) const {
	VkFormat vulkan_format = DataFormatToVulkan(fmt);
	VkFormatProperties properties;
	vkGetPhysicalDeviceFormatProperties(vulkan_->GetCurrentPhysicalDevice(), vulkan_format, &properties);
	uint32_t flags = 0;
	if (properties.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
		flags |= FMT_RENDERTARGET;
	}
	if (properties.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
		flags |= FMT_DEPTHSTENCIL;
	}
	if (properties.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) {
		flags |= FMT_TEXTURE;
	}
	if (properties.bufferFeatures & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT) {
		flags |= FMT_INPUTLAYOUT;
	}
	if ((properties.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_SRC_BIT) && (properties.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_DST_BIT)) {
		flags |= FMT_BLIT;
	}
	if (properties.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) {
		flags |= FMT_STORAGE_IMAGE;
	}
	return flags;
}
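
// Example of how a caller might check the returned support flags (a sketch only; the
// format chosen here is arbitrary):
//
//   uint32_t support = draw->GetDataFormatSupport(Draw::DataFormat::R8G8B8A8_UNORM);
//   if (support & FMT_RENDERTARGET) {
//       // Usable as a color attachment with optimal tiling.
//   }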
// A VKFramebuffer is a VkFramebuffer (note the capitalization difference) plus all the textures it owns.
// It also keeps a reference to the command buffer it was last rendered to. If a transition is needed
// and the frame number matches, that command buffer is used; otherwise this frame's init command buffer is.
class VKFramebuffer : public Framebuffer {
public:
	VKFramebuffer(VKRFramebuffer *fb, int multiSampleLevel) : buf_(fb) {
		_assert_msg_(fb, "Null fb in VKFramebuffer constructor");
		width_ = fb->width;
		height_ = fb->height;
		layers_ = fb->numLayers;
		multiSampleLevel_ = multiSampleLevel;
	}
	~VKFramebuffer() {
		_assert_msg_(buf_, "Null buf_ in VKFramebuffer - double delete?");
		buf_->Vulkan()->Delete().QueueCallback([](VulkanContext *vulkan, void *fb) {
			VKRFramebuffer *vfb = static_cast<VKRFramebuffer *>(fb);
			delete vfb;
		}, buf_);
		buf_ = nullptr;
	}
	VKRFramebuffer *GetFB() const { return buf_; }
	void UpdateTag(const char *newTag) override {
		buf_->UpdateTag(newTag);
	}
	const char *Tag() const override {
		return buf_->Tag();
	}
private:
	VKRFramebuffer *buf_;
};
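
// Framebuffer deletion is deferred through the VulkanContext delete queue (see ~VKFramebuffer
// above), so releasing a framebuffer doesn't have to wait for the GPU to finish with it.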
Framebuffer *VKContext::CreateFramebuffer(const FramebufferDesc &desc) {
	_assert_(desc.multiSampleLevel >= 0);
	_assert_(desc.numLayers > 0);
	_assert_(desc.width > 0);
	_assert_(desc.height > 0);

	VKRFramebuffer *vkrfb = new VKRFramebuffer(vulkan_, &renderManager_.PostInitBarrier(), desc.width, desc.height, desc.numLayers, desc.multiSampleLevel, desc.z_stencil, desc.tag);
	return new VKFramebuffer(vkrfb, desc.multiSampleLevel);
}

void VKContext::CopyFramebufferImage(Framebuffer *srcfb, int level, int x, int y, int z, Framebuffer *dstfb, int dstLevel, int dstX, int dstY, int dstZ, int width, int height, int depth, Aspect aspects, const char *tag) {
	VKFramebuffer *src = (VKFramebuffer *)srcfb;
	VKFramebuffer *dst = (VKFramebuffer *)dstfb;

	int aspectMask = 0;
	if (aspects & Aspect::COLOR_BIT) aspectMask |= VK_IMAGE_ASPECT_COLOR_BIT;
	if (aspects & Aspect::DEPTH_BIT) aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
	if (aspects & Aspect::STENCIL_BIT) aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;

	renderManager_.CopyFramebuffer(src->GetFB(), VkRect2D{ {x, y}, {(uint32_t)width, (uint32_t)height } }, dst->GetFB(), VkOffset2D{ dstX, dstY }, aspectMask, tag);
}

bool VKContext::BlitFramebuffer(Framebuffer *srcfb, int srcX1, int srcY1, int srcX2, int srcY2, Framebuffer *dstfb, int dstX1, int dstY1, int dstX2, int dstY2, Aspect aspects, FBBlitFilter filter, const char *tag) {
	VKFramebuffer *src = (VKFramebuffer *)srcfb;
	VKFramebuffer *dst = (VKFramebuffer *)dstfb;

	int aspectMask = 0;
	if (aspects & Aspect::COLOR_BIT) aspectMask |= VK_IMAGE_ASPECT_COLOR_BIT;
	if (aspects & Aspect::DEPTH_BIT) aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
	if (aspects & Aspect::STENCIL_BIT) aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;

	renderManager_.BlitFramebuffer(src->GetFB(), VkRect2D{ {srcX1, srcY1}, {(uint32_t)(srcX2 - srcX1), (uint32_t)(srcY2 - srcY1) } }, dst->GetFB(), VkRect2D{ {dstX1, dstY1}, {(uint32_t)(dstX2 - dstX1), (uint32_t)(dstY2 - dstY1) } }, aspectMask, filter == FB_BLIT_LINEAR ? VK_FILTER_LINEAR : VK_FILTER_NEAREST, tag);
	return true;
}

bool VKContext::CopyFramebufferToMemory(Framebuffer *srcfb, Aspect aspects, int x, int y, int w, int h, Draw::DataFormat format, void *pixels, int pixelStride, ReadbackMode mode, const char *tag) {
	VKFramebuffer *src = (VKFramebuffer *)srcfb;

	int aspectMask = 0;
	if (aspects & Aspect::COLOR_BIT) aspectMask |= VK_IMAGE_ASPECT_COLOR_BIT;
	if (aspects & Aspect::DEPTH_BIT) aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
	if (aspects & Aspect::STENCIL_BIT) aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;

	return renderManager_.CopyFramebufferToMemory(src ? src->GetFB() : nullptr, aspectMask, x, y, w, h, format, (uint8_t *)pixels, pixelStride, mode, tag);
}
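
// When reading back the backbuffer (src == nullptr below), preferring the swapchain's own
// BGRA format presumably avoids a pixel-format conversion during readback.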
DataFormat VKContext::PreferredFramebufferReadbackFormat(Framebuffer *src) {
	if (src) {
		return DrawContext::PreferredFramebufferReadbackFormat(src);
	}

	if (vulkan_->GetSwapchainFormat() == VK_FORMAT_B8G8R8A8_UNORM) {
		return Draw::DataFormat::B8G8R8A8_UNORM;
	}
	return DrawContext::PreferredFramebufferReadbackFormat(src);
}

void VKContext::BindFramebufferAsRenderTarget(Framebuffer *fbo, const RenderPassInfo &rp, const char *tag) {
	VKFramebuffer *fb = (VKFramebuffer *)fbo;
	VKRRenderPassLoadAction color = (VKRRenderPassLoadAction)rp.color;
	VKRRenderPassLoadAction depth = (VKRRenderPassLoadAction)rp.depth;
	VKRRenderPassLoadAction stencil = (VKRRenderPassLoadAction)rp.stencil;

	renderManager_.BindFramebufferAsRenderTarget(fb ? fb->GetFB() : nullptr, color, depth, stencil, rp.clearColor, rp.clearDepth, rp.clearStencil, tag);
	curFramebuffer_ = fb;
}

void VKContext::BindFramebufferAsTexture(Framebuffer *fbo, int binding, Aspect channelBit, int layer) {
	VKFramebuffer *fb = (VKFramebuffer *)fbo;
	_assert_(binding >= 0 && binding < MAX_BOUND_TEXTURES);

	// TODO: There are cases where this is okay, actually. But requires layout transitions and stuff -
	// we're not ready for this.
	_assert_(fb != curFramebuffer_);

	int aspect = 0;
	switch (channelBit) {
	case Aspect::COLOR_BIT:
		aspect = VK_IMAGE_ASPECT_COLOR_BIT;
		break;
	case Aspect::DEPTH_BIT:
		aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
		break;
	default:
		// Hm, can we texture from stencil?
		_assert_(false);
		break;
	}

	boundTextures_[binding].reset(nullptr);
	boundImageView_[binding] = renderManager_.BindFramebufferAsTexture(fb->GetFB(), binding, aspect, layer);
}

void VKContext::GetFramebufferDimensions(Framebuffer *fbo, int *w, int *h) {
	VKFramebuffer *fb = (VKFramebuffer *)fbo;
	if (fb) {
		*w = fb->GetFB()->width;
		*h = fb->GetFB()->height;
	} else {
		*w = vulkan_->GetBackbufferWidth();
		*h = vulkan_->GetBackbufferHeight();
	}
}

void VKContext::HandleEvent(Event ev, int width, int height, void *param1, void *param2) {
	switch (ev) {
	case Event::LOST_BACKBUFFER:
		renderManager_.DestroyBackbuffers();
		break;
	case Event::GOT_BACKBUFFER:
		renderManager_.CreateBackbuffers();
		break;
	default:
		_assert_(false);
		break;
	}
}

void VKContext::InvalidateFramebuffer(FBInvalidationStage stage, Aspect aspects) {
	VkImageAspectFlags flags = 0;
	if (aspects & Aspect::COLOR_BIT)
		flags |= VK_IMAGE_ASPECT_COLOR_BIT;
	if (aspects & Aspect::DEPTH_BIT)
		flags |= VK_IMAGE_ASPECT_DEPTH_BIT;
	if (aspects & Aspect::STENCIL_BIT)
		flags |= VK_IMAGE_ASPECT_STENCIL_BIT;
	if (stage == FB_INVALIDATION_LOAD) {
		renderManager_.SetLoadDontCare(flags);
	} else if (stage == FB_INVALIDATION_STORE) {
		renderManager_.SetStoreDontCare(flags);
	}
}
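
// GetNativeObject exposes raw Vulkan handles (and the render manager) to callers that need
// to interoperate with the backend directly.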
uint64_t VKContext::GetNativeObject(NativeObject obj, void *srcObject) {
	switch (obj) {
	case NativeObject::CONTEXT:
		return (uint64_t)vulkan_;
	case NativeObject::INIT_COMMANDBUFFER:
		return (uint64_t)renderManager_.GetInitCmd();
	case NativeObject::BOUND_TEXTURE0_IMAGEVIEW:
		return (uint64_t)boundImageView_[0];
	case NativeObject::BOUND_TEXTURE1_IMAGEVIEW:
		return (uint64_t)boundImageView_[1];
	case NativeObject::RENDER_MANAGER:
		return (uint64_t)(uintptr_t)&renderManager_;
	case NativeObject::NULL_IMAGEVIEW:
		return (uint64_t)GetNullTexture()->GetImageView();
	case NativeObject::NULL_IMAGEVIEW_ARRAY:
		return (uint64_t)GetNullTexture()->GetImageArrayView();
	case NativeObject::TEXTURE_VIEW:
		return (uint64_t)(((VKTexture *)srcObject)->GetImageView());
	case NativeObject::BOUND_FRAMEBUFFER_COLOR_IMAGEVIEW_ALL_LAYERS:
		return (uint64_t)curFramebuffer_->GetFB()->color.texAllLayersView;
	case NativeObject::BOUND_FRAMEBUFFER_COLOR_IMAGEVIEW_RT:
		return (uint64_t)curFramebuffer_->GetFB()->GetRTView();
	case NativeObject::THIN3D_PIPELINE_LAYOUT:
		return (uint64_t)pipelineLayout_;
	case NativeObject::PUSH_POOL:
		return (uint64_t)push_;
	default:
		Crash();
		return 0;
	}
}

void VKContext::DebugAnnotate(const char *annotation) {
	renderManager_.DebugAnnotate(annotation);
}

} // namespace Draw