vulkan_util.h

#pragma once

/* Standard headers and the Vulkan API. When the GLFW code path is used
   (GLFW_INCLUDE_VULKAN), the application is expected to include glfw3.h
   before this header. */
#include <vulkan/vulkan.h>
#include <assert.h>
#include <math.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#ifdef __APPLE__
const bool isApplePlatform = true;
#else
const bool isApplePlatform = false;
#endif
typedef struct VulkanDevice {
  VkPhysicalDevice gpu;
  VkPhysicalDeviceProperties gpuProperties;
  VkPhysicalDeviceMemoryProperties memoryProperties;
  VkQueueFamilyProperties *queueFamilyProperties;
  uint32_t queueFamilyPropertiesCount;
  uint32_t graphicsQueueFamilyIndex;
  VkDevice device;
  VkCommandPool commandPool;
} VulkanDevice;
VulkanDevice *createVulkanDevice(VkPhysicalDevice gpu) {
  VulkanDevice *device = (VulkanDevice *)malloc(sizeof(VulkanDevice));
  memset(device, 0, sizeof(VulkanDevice));
  device->gpu = gpu;
  vkGetPhysicalDeviceMemoryProperties(gpu, &device->memoryProperties);
  vkGetPhysicalDeviceProperties(gpu, &device->gpuProperties);
  vkGetPhysicalDeviceQueueFamilyProperties(gpu, &device->queueFamilyPropertiesCount, NULL);
  assert(device->queueFamilyPropertiesCount >= 1);
  device->queueFamilyProperties = (VkQueueFamilyProperties *)malloc(device->queueFamilyPropertiesCount * sizeof(VkQueueFamilyProperties));
  vkGetPhysicalDeviceQueueFamilyProperties(gpu, &device->queueFamilyPropertiesCount, device->queueFamilyProperties);
  assert(device->queueFamilyPropertiesCount >= 1);
  device->graphicsQueueFamilyIndex = UINT32_MAX;
  for (uint32_t i = 0; i < device->queueFamilyPropertiesCount; ++i) {
    if ((device->queueFamilyProperties[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
      device->graphicsQueueFamilyIndex = i;
    }
  }
  float queuePriorities[1] = {0.0};
  VkDeviceQueueCreateInfo queue_info = {VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO};
  queue_info.queueCount = 1;
  queue_info.pQueuePriorities = queuePriorities;
  queue_info.queueFamilyIndex = device->graphicsQueueFamilyIndex;
  const char *deviceExtensions[] = {
      VK_KHR_SWAPCHAIN_EXTENSION_NAME,
  };
  VkDeviceCreateInfo deviceInfo = {VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO};
  deviceInfo.queueCreateInfoCount = 1;
  deviceInfo.pQueueCreateInfos = &queue_info;
  deviceInfo.enabledExtensionCount = sizeof(deviceExtensions) / sizeof(deviceExtensions[0]);
  deviceInfo.ppEnabledExtensionNames = deviceExtensions;
  VkResult res = vkCreateDevice(gpu, &deviceInfo, NULL, &device->device);
  assert(res == VK_SUCCESS);
  /* Create a command pool to allocate our command buffer from */
  VkCommandPoolCreateInfo cmd_pool_info = {VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO};
  cmd_pool_info.queueFamilyIndex = device->graphicsQueueFamilyIndex;
  cmd_pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
  res = vkCreateCommandPool(device->device, &cmd_pool_info, NULL, &device->commandPool);
  assert(res == VK_SUCCESS);
  return device;
}
void destroyVulkanDevice(VulkanDevice *device) {
  free(device->queueFamilyProperties);
  if (device->commandPool) {
    vkDestroyCommandPool(device->device, device->commandPool, 0);
  }
  if (device->device) {
    vkDestroyDevice(device->device, 0);
  }
  free(device);
}
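/* Illustrative usage sketch (not part of the original header): pick a physical
 * device from an existing VkInstance `inst` and wrap it in a VulkanDevice.
 * The first enumerated GPU is used here for brevity; a real application may
 * want to score the available devices instead.
 *
 *   uint32_t gpuCount = 0;
 *   vkEnumeratePhysicalDevices(inst, &gpuCount, NULL);
 *   VkPhysicalDevice *gpus = malloc(gpuCount * sizeof(VkPhysicalDevice));
 *   vkEnumeratePhysicalDevices(inst, &gpuCount, gpus);
 *   VulkanDevice *device = createVulkanDevice(gpus[0]);
 *   // ... use device ...
 *   destroyVulkanDevice(device);
 *   free(gpus);
 */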
typedef struct SwapchainBuffers {
  VkImage image;
  VkImageView view;
} SwapchainBuffers;

typedef struct DepthBuffer {
  VkFormat format;
  VkImage image;
  VkDeviceMemory mem;
  VkImageView view;
} DepthBuffer;

typedef struct FrameBuffers {
  VkSwapchainKHR swap_chain;
  SwapchainBuffers *swap_chain_buffers;
  uint32_t swapchain_image_count;
  VkFramebuffer *framebuffers;
  uint32_t current_buffer;
  uint32_t current_frame;
  uint64_t num_swaps;
  VkExtent2D buffer_size;
  VkRenderPass render_pass;
  VkFormat format;
  DepthBuffer depth;
  VkSemaphore *present_complete_semaphore;
  VkSemaphore *render_complete_semaphore;
  VkFence *flight_fence;
} FrameBuffers;
static VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData, void *pUserData) {
  printf("%s\n", pCallbackData->pMessage);
  return VK_FALSE;
}

static VkDebugUtilsMessengerEXT debugMessenger = NULL;
#ifdef GLFW_INCLUDE_VULKAN
static VkInstance createVkInstance(bool enable_debug_layer) {
  // initialize the VkApplicationInfo structure
  VkApplicationInfo app_info = {VK_STRUCTURE_TYPE_APPLICATION_INFO};
  app_info.pApplicationName = "NanoVG";
  app_info.applicationVersion = 1;
  app_info.pEngineName = "NanoVG";
  app_info.engineVersion = 1;
  app_info.apiVersion = VK_API_VERSION_1_0;

  static const char *append_extensions[] = {
      VK_EXT_DEBUG_UTILS_EXTENSION_NAME,
  };
  uint32_t append_extensions_count = sizeof(append_extensions) / sizeof(append_extensions[0]);
  if (!enable_debug_layer) {
    append_extensions_count = 0;
  }
  static const char *apple_extensions[] = {
      VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME,
  };
  uint32_t apple_extensions_count = sizeof(apple_extensions) / sizeof(apple_extensions[0]);
  if (!isApplePlatform) {
    apple_extensions_count = 0;
  }

  uint32_t extensions_count = 0;
  const char **glfw_extensions = glfwGetRequiredInstanceExtensions(&extensions_count);
  const char **extensions = (const char **)calloc(extensions_count + append_extensions_count + apple_extensions_count, sizeof(char *));
  for (uint32_t i = 0; i < extensions_count; ++i) {
    extensions[i] = glfw_extensions[i];
  }
  for (uint32_t i = 0; i < append_extensions_count; ++i) {
    extensions[extensions_count++] = append_extensions[i];
  }
  for (uint32_t i = 0; i < apple_extensions_count; ++i) {
    extensions[extensions_count++] = apple_extensions[i];
  }

  // initialize the VkInstanceCreateInfo structure
  VkInstanceCreateInfo inst_info = {VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO};
  inst_info.pApplicationInfo = &app_info;
  inst_info.enabledExtensionCount = extensions_count;
  inst_info.ppEnabledExtensionNames = extensions;
  if (isApplePlatform) {
    inst_info.flags = VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
  }

  static const char *instance_validation_layers[] = {
      "VK_LAYER_KHRONOS_validation"
  };
  if (enable_debug_layer) {
    inst_info.enabledLayerCount = sizeof(instance_validation_layers) / sizeof(instance_validation_layers[0]);
    inst_info.ppEnabledLayerNames = instance_validation_layers;
    uint32_t layerCount = 0;
    vkEnumerateInstanceLayerProperties(&layerCount, 0);
    VkLayerProperties *layerprop = (VkLayerProperties *)malloc(sizeof(VkLayerProperties) * layerCount);
    vkEnumerateInstanceLayerProperties(&layerCount, layerprop);
    printf("vkEnumerateInstanceLayerProperties:\n");
    for (uint32_t i = 0; i < layerCount; ++i) {
      printf("%s\n", layerprop[i].layerName);
    }
    free(layerprop);
  }

  VkInstance inst;
  VkResult res;
  res = vkCreateInstance(&inst_info, NULL, &inst);
  free((void *)extensions);
  if (res == VK_ERROR_INCOMPATIBLE_DRIVER) {
    printf("cannot find a compatible Vulkan ICD\n");
    exit(-1);
  } else if (res) {
    switch (res) {
    case VK_ERROR_OUT_OF_HOST_MEMORY:
      printf("VK_ERROR_OUT_OF_HOST_MEMORY\n");
      break;
    case VK_ERROR_OUT_OF_DEVICE_MEMORY:
      printf("VK_ERROR_OUT_OF_DEVICE_MEMORY\n");
      break;
    case VK_ERROR_INITIALIZATION_FAILED:
      printf("VK_ERROR_INITIALIZATION_FAILED\n");
      break;
    case VK_ERROR_LAYER_NOT_PRESENT:
      printf("VK_ERROR_LAYER_NOT_PRESENT\n");
      break;
    case VK_ERROR_EXTENSION_NOT_PRESENT:
      printf("VK_ERROR_EXTENSION_NOT_PRESENT\n");
      break;
    default:
      printf("unknown error %d\n", res);
      break;
    }
    exit(-1);
  }

  if (enable_debug_layer) {
    VkDebugUtilsMessengerCreateInfoEXT createInfo = {0};
    createInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
    createInfo.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
    createInfo.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
    createInfo.pfnUserCallback = debugCallback;
    PFN_vkCreateDebugUtilsMessengerEXT fn_vkCreateDebugUtilsMessengerEXT = (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(inst, "vkCreateDebugUtilsMessengerEXT");
    if (!fn_vkCreateDebugUtilsMessengerEXT) {
      printf("vkCreateDebugUtilsMessengerEXT not found\n");
    } else {
      VkResult res = fn_vkCreateDebugUtilsMessengerEXT(inst, &createInfo, NULL, &debugMessenger);
      if (res != VK_SUCCESS) {
        debugMessenger = NULL;
        printf("CreateDebugUtilsMessengerEXT failed (%d)\n", res);
      }
    }
  }
  return inst;
}
#endif
static void destroyDebugCallback(VkInstance instance) {
  if (!debugMessenger)
    return;
  PFN_vkDestroyDebugUtilsMessengerEXT fn_vkDestroyDebugUtilsMessengerEXT = (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance, "vkDestroyDebugUtilsMessengerEXT");
  if (fn_vkDestroyDebugUtilsMessengerEXT) {
    fn_vkDestroyDebugUtilsMessengerEXT(instance, debugMessenger, NULL);
  }
}
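/* Illustrative setup/teardown sketch for the GLFW path (GLFW_INCLUDE_VULKAN
 * defined before this header). `window` is assumed to be an existing
 * GLFWwindow created with GLFW_NO_API.
 *
 *   VkInstance inst = createVkInstance(true);   // true = enable the validation layer
 *   VkSurfaceKHR surface;
 *   glfwCreateWindowSurface(inst, window, NULL, &surface);
 *   // ... create the device, swapchain, etc. ...
 *   vkDestroySurfaceKHR(inst, surface, NULL);
 *   destroyDebugCallback(inst);
 *   vkDestroyInstance(inst, NULL);
 */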
VkCommandPool createCmdPool(VulkanDevice *device) {
  VkResult res;
  /* Create a command pool to allocate our command buffer from */
  VkCommandPoolCreateInfo cmd_pool_info = {VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO};
  cmd_pool_info.queueFamilyIndex = device->graphicsQueueFamilyIndex;
  cmd_pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
  VkCommandPool cmd_pool;
  res = vkCreateCommandPool(device->device, &cmd_pool_info, NULL, &cmd_pool);
  assert(res == VK_SUCCESS);
  return cmd_pool;
}
VkCommandBuffer *createCmdBuffer(VkDevice device, VkCommandPool cmd_pool, uint32_t command_buffer_count) {
  VkResult res;
  VkCommandBufferAllocateInfo cmd = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO};
  cmd.commandPool = cmd_pool;
  cmd.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
  cmd.commandBufferCount = command_buffer_count;
  VkCommandBuffer *cmd_buffer = calloc(command_buffer_count, sizeof(VkCommandBuffer));
  res = vkAllocateCommandBuffers(device, &cmd, cmd_buffer);
  assert(res == VK_SUCCESS);
  return cmd_buffer;
}
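/* Illustrative sketch of a one-shot command buffer built with these helpers;
 * `device` is a VulkanDevice* and `queue` its graphics queue (both assumed).
 *
 *   VkCommandBuffer *cmd = createCmdBuffer(device->device, device->commandPool, 1);
 *   VkCommandBufferBeginInfo begin = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO};
 *   vkBeginCommandBuffer(cmd[0], &begin);
 *   // ... record commands ...
 *   vkEndCommandBuffer(cmd[0]);
 *   VkSubmitInfo submit = {VK_STRUCTURE_TYPE_SUBMIT_INFO};
 *   submit.commandBufferCount = 1;
 *   submit.pCommandBuffers = cmd;
 *   vkQueueSubmit(queue, 1, &submit, VK_NULL_HANDLE);
 *   vkQueueWaitIdle(queue);
 *   vkFreeCommandBuffers(device->device, device->commandPool, 1, cmd);
 *   free(cmd);
 */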
bool memory_type_from_properties(VkPhysicalDeviceMemoryProperties memoryProps, uint32_t typeBits, VkFlags requirements_mask, uint32_t *typeIndex) {
  // Search memtypes to find first index with those properties
  for (uint32_t i = 0; i < memoryProps.memoryTypeCount; i++) {
    if ((typeBits & 1) == 1) {
      // Type is available, does it match user properties?
      if ((memoryProps.memoryTypes[i].propertyFlags & requirements_mask) == requirements_mask) {
        *typeIndex = i;
        return true;
      }
    }
    typeBits >>= 1;
  }
  // No memory types matched, return failure
  return false;
}
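/* Illustrative allocation sketch: picking a host-visible memory type for a
 * buffer. `device` is a VulkanDevice* and `buffer` an existing VkBuffer
 * (both assumed, not defined in this header).
 *
 *   VkMemoryRequirements memReqs;
 *   vkGetBufferMemoryRequirements(device->device, buffer, &memReqs);
 *   VkMemoryAllocateInfo allocInfo = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
 *   allocInfo.allocationSize = memReqs.size;
 *   bool ok = memory_type_from_properties(device->memoryProperties, memReqs.memoryTypeBits,
 *                                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
 *                                         &allocInfo.memoryTypeIndex);
 *   assert(ok);
 *   VkDeviceMemory memory;
 *   vkAllocateMemory(device->device, &allocInfo, NULL, &memory);
 *   vkBindBufferMemory(device->device, buffer, memory, 0);
 */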
DepthBuffer createDepthBuffer(const VulkanDevice *device, int width, int height) {
  VkResult res;
  DepthBuffer depth;
  depth.format = VK_FORMAT_D24_UNORM_S8_UINT;
#define dformats 3
  const VkFormat depth_formats[dformats] = {VK_FORMAT_D32_SFLOAT_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT, VK_FORMAT_D16_UNORM_S8_UINT};
  VkImageTiling image_tiling;
  for (int i = 0; i < dformats; i++) {
    VkFormatProperties fprops;
    vkGetPhysicalDeviceFormatProperties(device->gpu, depth_formats[i], &fprops);
    if (fprops.linearTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
      depth.format = depth_formats[i];
      image_tiling = VK_IMAGE_TILING_LINEAR;
      break;
    } else if (fprops.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
      depth.format = depth_formats[i];
      image_tiling = VK_IMAGE_TILING_OPTIMAL;
      break;
    }
    if (i == dformats - 1) {
      printf("Failed to find a supported depth format!\n");
      exit(-1);
    }
  }
  const VkFormat depth_format = depth.format;
  VkImageCreateInfo image_info = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO};
  image_info.imageType = VK_IMAGE_TYPE_2D;
  image_info.format = depth_format;
  image_info.tiling = image_tiling;
  image_info.extent.width = width;
  image_info.extent.height = height;
  image_info.extent.depth = 1;
  image_info.mipLevels = 1;
  image_info.arrayLayers = 1;
  image_info.samples = VK_SAMPLE_COUNT_1_BIT;
  image_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  image_info.queueFamilyIndexCount = 0;
  image_info.pQueueFamilyIndices = NULL;
  image_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
  image_info.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
  VkMemoryAllocateInfo mem_alloc = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
  VkImageViewCreateInfo view_info = {VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO};
  view_info.format = depth_format;
  view_info.components.r = VK_COMPONENT_SWIZZLE_R;
  view_info.components.g = VK_COMPONENT_SWIZZLE_G;
  view_info.components.b = VK_COMPONENT_SWIZZLE_B;
  view_info.components.a = VK_COMPONENT_SWIZZLE_A;
  view_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
  view_info.subresourceRange.baseMipLevel = 0;
  view_info.subresourceRange.levelCount = 1;
  view_info.subresourceRange.baseArrayLayer = 0;
  view_info.subresourceRange.layerCount = 1;
  view_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
  if (depth_format == VK_FORMAT_D16_UNORM_S8_UINT || depth_format == VK_FORMAT_D24_UNORM_S8_UINT ||
      depth_format == VK_FORMAT_D32_SFLOAT_S8_UINT) {
    view_info.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
  }
  VkMemoryRequirements mem_reqs;
  /* Create image */
  res = vkCreateImage(device->device, &image_info, NULL, &depth.image);
  assert(res == VK_SUCCESS);
  vkGetImageMemoryRequirements(device->device, depth.image, &mem_reqs);
  mem_alloc.allocationSize = mem_reqs.size;
  /* Use the memory properties to determine the type of memory required */
  bool pass =
      memory_type_from_properties(device->memoryProperties, mem_reqs.memoryTypeBits, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, &mem_alloc.memoryTypeIndex);
  assert(pass);
  /* Allocate memory */
  res = vkAllocateMemory(device->device, &mem_alloc, NULL, &depth.mem);
  assert(res == VK_SUCCESS);
  /* Bind memory */
  res = vkBindImageMemory(device->device, depth.image, depth.mem, 0);
  assert(res == VK_SUCCESS);
  /* Create image view */
  view_info.image = depth.image;
  res = vkCreateImageView(device->device, &view_info, NULL, &depth.view);
  assert(res == VK_SUCCESS);
  return depth;
}
static void setupImageLayout(VkCommandBuffer cmdbuffer, VkImage image,
                             VkImageAspectFlags aspectMask,
                             VkImageLayout old_image_layout,
                             VkImageLayout new_image_layout) {
  VkImageMemoryBarrier image_memory_barrier = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER};
  image_memory_barrier.oldLayout = old_image_layout;
  image_memory_barrier.newLayout = new_image_layout;
  image_memory_barrier.image = image;
  VkImageSubresourceRange subresourceRange = {aspectMask, 0, 1, 0, 1};
  image_memory_barrier.subresourceRange = subresourceRange;
  if (new_image_layout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
    /* Make sure anything that was copying from this image has completed */
    image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
  }
  if (new_image_layout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
    image_memory_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
  }
  if (new_image_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL) {
    image_memory_barrier.dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
  }
  if (new_image_layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
    /* Make sure any Copy or CPU writes to image are flushed */
    image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
  }
  VkImageMemoryBarrier *pmemory_barrier = &image_memory_barrier;
  VkPipelineStageFlags src_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
  VkPipelineStageFlags dest_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
  vkCmdPipelineBarrier(cmdbuffer, src_stages, dest_stages, 0, 0, NULL,
                       0, NULL, 1, pmemory_barrier);
}
SwapchainBuffers createSwapchainBuffers(const VulkanDevice *device, VkFormat format, VkCommandBuffer cmdbuffer, VkImage image) {
  VkResult res;
  SwapchainBuffers buffer;
  VkImageViewCreateInfo color_attachment_view = {VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO};
  color_attachment_view.format = format;
  color_attachment_view.components.r = VK_COMPONENT_SWIZZLE_R;
  color_attachment_view.components.g = VK_COMPONENT_SWIZZLE_G;
  color_attachment_view.components.b = VK_COMPONENT_SWIZZLE_B;
  color_attachment_view.components.a = VK_COMPONENT_SWIZZLE_A;
  VkImageSubresourceRange subresourceRange = {0};
  subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  subresourceRange.baseMipLevel = 0;
  subresourceRange.levelCount = 1;
  subresourceRange.baseArrayLayer = 0;
  subresourceRange.layerCount = 1;
  color_attachment_view.subresourceRange = subresourceRange;
  color_attachment_view.viewType = VK_IMAGE_VIEW_TYPE_2D;
  buffer.image = image;
  setupImageLayout(cmdbuffer, image, VK_IMAGE_ASPECT_COLOR_BIT,
                   VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
  color_attachment_view.image = buffer.image;
  res = vkCreateImageView(device->device, &color_attachment_view, NULL, &buffer.view);
  assert(res == VK_SUCCESS);
  return buffer;
}
VkRenderPass createRenderPass(VkDevice device, VkFormat color_format, VkFormat depth_format) {
  VkAttachmentDescription attachments[2] = {{0}};
  attachments[0].format = color_format;
  attachments[0].samples = VK_SAMPLE_COUNT_1_BIT;
  attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
  attachments[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
  attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
  attachments[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  attachments[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  attachments[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  attachments[1].format = depth_format;
  attachments[1].samples = VK_SAMPLE_COUNT_1_BIT;
  attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
  attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
  attachments[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  attachments[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  attachments[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  VkAttachmentReference color_reference = {0};
  color_reference.attachment = 0;
  color_reference.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  VkAttachmentReference depth_reference = {0};
  depth_reference.attachment = 1;
  depth_reference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  VkSubpassDescription subpass = {0};
  subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
  subpass.flags = 0;
  subpass.inputAttachmentCount = 0;
  subpass.pInputAttachments = NULL;
  subpass.colorAttachmentCount = 1;
  subpass.pColorAttachments = &color_reference;
  subpass.pResolveAttachments = NULL;
  subpass.pDepthStencilAttachment = &depth_reference;
  subpass.preserveAttachmentCount = 0;
  subpass.pPreserveAttachments = NULL;
  VkRenderPassCreateInfo rp_info = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
  rp_info.attachmentCount = 2;
  rp_info.pAttachments = attachments;
  rp_info.subpassCount = 1;
  rp_info.pSubpasses = &subpass;
  VkRenderPass render_pass;
  VkResult res;
  res = vkCreateRenderPass(device, &rp_info, NULL, &render_pass);
  assert(res == VK_SUCCESS);
  return render_pass;
}
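/* Illustrative sketch of beginning this render pass: both attachments use
 * VK_ATTACHMENT_LOAD_OP_CLEAR, so two clear values must be supplied. `fb` is
 * assumed to be a FrameBuffers created by createFrameBuffers below and `cmd`
 * a command buffer currently being recorded.
 *
 *   VkClearValue clear_values[2];
 *   clear_values[0].color = (VkClearColorValue){{0.3f, 0.3f, 0.32f, 1.0f}};
 *   clear_values[1].depthStencil = (VkClearDepthStencilValue){1.0f, 0};
 *   VkRenderPassBeginInfo rp_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO};
 *   rp_begin.renderPass = fb->render_pass;
 *   rp_begin.framebuffer = fb->framebuffers[fb->current_buffer];
 *   rp_begin.renderArea.extent = fb->buffer_size;
 *   rp_begin.clearValueCount = 2;
 *   rp_begin.pClearValues = clear_values;
 *   vkCmdBeginRenderPass(cmd, &rp_begin, VK_SUBPASS_CONTENTS_INLINE);
 *   // ... draw ...
 *   vkCmdEndRenderPass(cmd);
 */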
FrameBuffers createFrameBuffers(const VulkanDevice *device, VkSurfaceKHR surface, VkQueue queue, int winWidth, int winHeight, VkSwapchainKHR oldSwapchain) {
  VkResult res;
  VkBool32 supportsPresent;
  vkGetPhysicalDeviceSurfaceSupportKHR(device->gpu, device->graphicsQueueFamilyIndex, surface, &supportsPresent);
  if (!supportsPresent) {
    exit(-1); // the graphics queue family cannot present to this surface
  }
  VkCommandBuffer *setup_cmd_buffer = createCmdBuffer(device->device, device->commandPool, 1);
  const VkCommandBufferBeginInfo cmd_buf_info = {
      VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
  };
  vkBeginCommandBuffer(setup_cmd_buffer[0], &cmd_buf_info);
  VkFormat colorFormat = VK_FORMAT_B8G8R8A8_UNORM;
  VkColorSpaceKHR colorSpace;
  {
    // Get the list of VkFormats that are supported:
    uint32_t formatCount;
    res = vkGetPhysicalDeviceSurfaceFormatsKHR(device->gpu, surface, &formatCount, NULL);
    assert(res == VK_SUCCESS);
    VkSurfaceFormatKHR *surfFormats = (VkSurfaceFormatKHR *)malloc(formatCount * sizeof(VkSurfaceFormatKHR));
    res = vkGetPhysicalDeviceSurfaceFormatsKHR(device->gpu, surface, &formatCount, surfFormats);
    assert(res == VK_SUCCESS);
    // If the format list includes just one entry of VK_FORMAT_UNDEFINED,
    // the surface has no preferred format. Otherwise, at least one
    // supported format will be returned.
    if (formatCount == 1 && surfFormats[0].format == VK_FORMAT_UNDEFINED) {
      colorFormat = VK_FORMAT_B8G8R8A8_UNORM;
    } else {
      assert(formatCount >= 1);
      colorFormat = surfFormats[0].format;
    }
    colorSpace = surfFormats[0].colorSpace;
    free(surfFormats);
  }
  // Force BGRA8 regardless of the queried format (the color space above is kept).
  colorFormat = VK_FORMAT_B8G8R8A8_UNORM;
  // Check the surface capabilities and formats
  VkSurfaceCapabilitiesKHR surfCapabilities;
  res = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(device->gpu, surface, &surfCapabilities);
  assert(res == VK_SUCCESS);
  VkExtent2D buffer_size;
  // width and height are either both -1, or both not -1.
  if (surfCapabilities.currentExtent.width == (uint32_t)-1) {
    buffer_size.width = winWidth;
    buffer_size.height = winHeight;
  } else {
    // If the surface size is defined, the swap chain size must match
    buffer_size = surfCapabilities.currentExtent;
  }
  DepthBuffer depth = createDepthBuffer(device, buffer_size.width, buffer_size.height);
  VkRenderPass render_pass = createRenderPass(device->device, colorFormat, depth.format);
  VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;
  uint32_t presentModeCount;
  vkGetPhysicalDeviceSurfacePresentModesKHR(device->gpu, surface, &presentModeCount, NULL);
  assert(presentModeCount > 0);
  VkPresentModeKHR *presentModes = (VkPresentModeKHR *)malloc(sizeof(VkPresentModeKHR) * presentModeCount);
  vkGetPhysicalDeviceSurfacePresentModesKHR(device->gpu, surface, &presentModeCount, presentModes);
  for (size_t i = 0; i < presentModeCount; i++) {
    if (presentModes[i] == VK_PRESENT_MODE_MAILBOX_KHR) {
      swapchainPresentMode = VK_PRESENT_MODE_MAILBOX_KHR;
      break;
    }
    if ((swapchainPresentMode != VK_PRESENT_MODE_MAILBOX_KHR) && (presentModes[i] == VK_PRESENT_MODE_IMMEDIATE_KHR)) {
      swapchainPresentMode = VK_PRESENT_MODE_IMMEDIATE_KHR;
    }
  }
  free(presentModes);
  VkSurfaceTransformFlagBitsKHR preTransform;
  if (surfCapabilities.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
    preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
  } else {
    preTransform = surfCapabilities.currentTransform;
  }
  // Determine the number of VkImage's to use in the swap chain (we desire to
  // own only 1 image at a time, besides the images being displayed and
  // queued for display):
  uint32_t desiredNumberOfSwapchainImages = fmax(surfCapabilities.minImageCount + 1, 3);
  if ((surfCapabilities.maxImageCount > 0) &&
      (desiredNumberOfSwapchainImages > surfCapabilities.maxImageCount)) {
    // Application must settle for fewer images than desired:
    desiredNumberOfSwapchainImages = surfCapabilities.maxImageCount;
  }
  VkSwapchainCreateInfoKHR swapchainInfo = {VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR};
  swapchainInfo.surface = surface;
  swapchainInfo.minImageCount = desiredNumberOfSwapchainImages;
  swapchainInfo.imageFormat = colorFormat;
  swapchainInfo.imageColorSpace = colorSpace;
  swapchainInfo.imageExtent = buffer_size;
  swapchainInfo.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
  swapchainInfo.preTransform = preTransform;
  swapchainInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
  swapchainInfo.imageArrayLayers = 1;
  swapchainInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
  swapchainInfo.presentMode = swapchainPresentMode;
  swapchainInfo.oldSwapchain = oldSwapchain;
  swapchainInfo.clipped = true;
  VkSwapchainKHR swap_chain;
  res = vkCreateSwapchainKHR(device->device, &swapchainInfo, NULL, &swap_chain);
  assert(res == VK_SUCCESS);
  if (oldSwapchain != VK_NULL_HANDLE) {
    vkDestroySwapchainKHR(device->device, oldSwapchain, NULL);
  }
  uint32_t swapchain_image_count;
  res = vkGetSwapchainImagesKHR(device->device, swap_chain, &swapchain_image_count, NULL);
  assert(res == VK_SUCCESS);
  VkImage *swapchainImages = (VkImage *)malloc(swapchain_image_count * sizeof(VkImage));
  assert(swapchainImages);
  res = vkGetSwapchainImagesKHR(device->device, swap_chain, &swapchain_image_count, swapchainImages);
  assert(res == VK_SUCCESS);
  SwapchainBuffers *swap_chain_buffers = (SwapchainBuffers *)malloc(swapchain_image_count * sizeof(SwapchainBuffers));
  for (uint32_t i = 0; i < swapchain_image_count; i++) {
    swap_chain_buffers[i] = createSwapchainBuffers(device, colorFormat, setup_cmd_buffer[0], swapchainImages[i]);
  }
  free(swapchainImages);
  VkImageView attachments[2];
  attachments[1] = depth.view;
  VkFramebufferCreateInfo fb_info = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO};
  fb_info.renderPass = render_pass;
  fb_info.attachmentCount = 2;
  fb_info.pAttachments = attachments;
  fb_info.width = buffer_size.width;
  fb_info.height = buffer_size.height;
  fb_info.layers = 1;
  uint32_t i;
  VkFramebuffer *framebuffers = (VkFramebuffer *)malloc(swapchain_image_count * sizeof(VkFramebuffer));
  assert(framebuffers);
  for (i = 0; i < swapchain_image_count; i++) {
    attachments[0] = swap_chain_buffers[i].view;
    res = vkCreateFramebuffer(device->device, &fb_info, NULL, &framebuffers[i]);
    assert(res == VK_SUCCESS);
  }
  vkEndCommandBuffer(setup_cmd_buffer[0]);
  VkSubmitInfo submitInfo = {VK_STRUCTURE_TYPE_SUBMIT_INFO};
  submitInfo.commandBufferCount = 1;
  submitInfo.pCommandBuffers = setup_cmd_buffer;
  vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE);
  vkQueueWaitIdle(queue);
  vkFreeCommandBuffers(device->device, device->commandPool, 1, setup_cmd_buffer);
  free(setup_cmd_buffer);
  FrameBuffers buffer = {0};
  buffer.swap_chain = swap_chain;
  buffer.swap_chain_buffers = swap_chain_buffers;
  buffer.swapchain_image_count = swapchain_image_count;
  buffer.framebuffers = framebuffers;
  buffer.current_buffer = 0;
  buffer.format = colorFormat;
  buffer.buffer_size = buffer_size;
  buffer.render_pass = render_pass;
  buffer.depth = depth;
  buffer.present_complete_semaphore = (VkSemaphore *)calloc(swapchain_image_count, sizeof(VkSemaphore));
  buffer.render_complete_semaphore = (VkSemaphore *)calloc(swapchain_image_count, sizeof(VkSemaphore));
  buffer.flight_fence = (VkFence *)calloc(swapchain_image_count, sizeof(VkFence));
  VkSemaphoreCreateInfo presentCompleteSemaphoreCreateInfo = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO};
  VkFenceCreateInfo fenceCreateInfo = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO};
  for (i = 0; i < swapchain_image_count; i++) {
    res = vkCreateSemaphore(device->device, &presentCompleteSemaphoreCreateInfo, NULL, &buffer.present_complete_semaphore[i]);
    assert(res == VK_SUCCESS);
    res = vkCreateSemaphore(device->device, &presentCompleteSemaphoreCreateInfo, NULL, &buffer.render_complete_semaphore[i]);
    assert(res == VK_SUCCESS);
    res = vkCreateFence(device->device, &fenceCreateInfo, NULL, &buffer.flight_fence[i]);
    assert(res == VK_SUCCESS);
  }
  return buffer;
}
void destroyFrameBuffers(const VulkanDevice *device, FrameBuffers *buffer, VkQueue queue) {
  VkResult res = vkQueueWaitIdle(queue);
  assert(res == VK_SUCCESS);
  for (uint32_t i = 0; i < buffer->swapchain_image_count; ++i) {
    if (buffer->present_complete_semaphore[i] != VK_NULL_HANDLE) {
      vkDestroySemaphore(device->device, buffer->present_complete_semaphore[i], NULL);
    }
    if (buffer->render_complete_semaphore[i] != VK_NULL_HANDLE) {
      vkDestroySemaphore(device->device, buffer->render_complete_semaphore[i], NULL);
    }
    if (buffer->flight_fence[i] != VK_NULL_HANDLE) {
      vkDestroyFence(device->device, buffer->flight_fence[i], NULL);
    }
  }
  for (uint32_t i = 0; i < buffer->swapchain_image_count; ++i) {
    vkDestroyImageView(device->device, buffer->swap_chain_buffers[i].view, 0);
    vkDestroyFramebuffer(device->device, buffer->framebuffers[i], 0);
  }
  vkDestroyImageView(device->device, buffer->depth.view, 0);
  vkDestroyImage(device->device, buffer->depth.image, 0);
  vkFreeMemory(device->device, buffer->depth.mem, 0);
  vkDestroyRenderPass(device->device, buffer->render_pass, 0);
  vkDestroySwapchainKHR(device->device, buffer->swap_chain, 0);
  free(buffer->framebuffers);
  free(buffer->swap_chain_buffers);
  free(buffer->present_complete_semaphore);
  free(buffer->render_complete_semaphore);
  free(buffer->flight_fence);
}
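/* Illustrative per-frame sketch using the FrameBuffers sync objects. Written
 * as a fully serialized loop for clarity (submit, present, then wait on the
 * fence before reusing the slot); `device`, `queue` and `fb` are assumed to
 * come from the helpers above. The fences are created unsignaled, which is
 * why the wait happens after the submit rather than before it.
 *
 *   uint32_t frame = fb->current_frame;
 *   vkAcquireNextImageKHR(device->device, fb->swap_chain, UINT64_MAX,
 *                         fb->present_complete_semaphore[frame], VK_NULL_HANDLE, &fb->current_buffer);
 *   // ... record a command buffer `cmd` that renders into fb->framebuffers[fb->current_buffer] ...
 *   VkPipelineStageFlags wait_stage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
 *   VkSubmitInfo submit = {VK_STRUCTURE_TYPE_SUBMIT_INFO};
 *   submit.waitSemaphoreCount = 1;
 *   submit.pWaitSemaphores = &fb->present_complete_semaphore[frame];
 *   submit.pWaitDstStageMask = &wait_stage;
 *   submit.commandBufferCount = 1;
 *   submit.pCommandBuffers = &cmd;
 *   submit.signalSemaphoreCount = 1;
 *   submit.pSignalSemaphores = &fb->render_complete_semaphore[frame];
 *   vkQueueSubmit(queue, 1, &submit, fb->flight_fence[frame]);
 *   VkPresentInfoKHR present = {VK_STRUCTURE_TYPE_PRESENT_INFO_KHR};
 *   present.waitSemaphoreCount = 1;
 *   present.pWaitSemaphores = &fb->render_complete_semaphore[frame];
 *   present.swapchainCount = 1;
 *   present.pSwapchains = &fb->swap_chain;
 *   present.pImageIndices = &fb->current_buffer;
 *   vkQueuePresentKHR(queue, &present);
 *   vkWaitForFences(device->device, 1, &fb->flight_fence[frame], VK_TRUE, UINT64_MAX);
 *   vkResetFences(device->device, 1, &fb->flight_fence[frame]);
 *   fb->current_frame = (frame + 1) % fb->swapchain_image_count;
 *   fb->num_swaps++;
 */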