// Danil, 2021+ Vulkan shader launcher, self https://github.com/danilw/vulkan-shadertoy-launcher
// The MIT License

#include "vk_utils.h"

void vk_exit(VkInstance vk)
{
    vkDestroyInstance(vk, NULL);
}
vk_error vk_enumerate_devices(VkInstance vk, VkSurfaceKHR *surface, struct vk_physical_device *devs, uint32_t *idx, bool use_idx)
{
    vk_error retval = VK_ERROR_NONE;
    VkResult res;
    uint32_t count = 0;
    bool last_use_idx = false;
    uint32_t last_idx = 0; // last suitable non-DISCRETE_GPU device
    res = vkEnumeratePhysicalDevices(vk, &count, NULL);
    vk_error_set_vkresult(&retval, res);
    if (res < 0) {
        return retval;
    }
    if (count < 1) {
        printf("No Vulkan device found.\n");
        vk_error_set_vkresult(&retval, VK_ERROR_INCOMPATIBLE_DRIVER);
        return retval;
    }
    VkPhysicalDevice *phy_devs = malloc(count * sizeof(VkPhysicalDevice));
    res = vkEnumeratePhysicalDevices(vk, &count, phy_devs);
    vk_error_set_vkresult(&retval, res);
    if (res < 0) {
        free(phy_devs);
        phy_devs = NULL;
        return retval;
    }
    // Unless an explicit device index was given, prefer the first discrete GPU
    // whose queue family supports both graphics and present; otherwise fall
    // back to the last other device that matched.
    for (uint32_t i = 0; i < count && !use_idx; i++)
    {
        uint32_t qfc = 0;
        vkGetPhysicalDeviceQueueFamilyProperties(phy_devs[i], &qfc, NULL);
        if (qfc < 1)
            continue;
        VkQueueFamilyProperties *queue_family_properties = malloc(qfc * sizeof(VkQueueFamilyProperties));
        vkGetPhysicalDeviceQueueFamilyProperties(phy_devs[i], &qfc, queue_family_properties);
        for (uint32_t j = 0; j < qfc; j++)
        {
            VkBool32 supports_present;
            vkGetPhysicalDeviceSurfaceSupportKHR(phy_devs[i], j, *surface, &supports_present);
            if ((queue_family_properties[j].queueFlags & VK_QUEUE_GRAPHICS_BIT) && supports_present)
            {
                VkPhysicalDeviceProperties pr;
                vkGetPhysicalDeviceProperties(phy_devs[i], &pr);
                if (pr.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU) {
                    *idx = i;
                    use_idx = true;
                } else {
                    last_use_idx = true;
                    last_idx = i;
                }
                break;
            }
        }
        free(queue_family_properties);
    }
    if (last_use_idx && !use_idx) {
        use_idx = true;
        *idx = last_idx;
    }
    if (!use_idx) {
        printf("No suitable queue that supports graphics was found.\n");
        vk_error_set_vkresult(&retval, VK_ERROR_INCOMPATIBLE_DRIVER);
        free(phy_devs);
        phy_devs = NULL;
        return retval;
    }
    if (*idx >= count) {
        printf("Wrong GPU index %lu, max device count %lu\n", (unsigned long)*idx, (unsigned long)count);
        vk_error_set_vkresult(&retval, VK_ERROR_INCOMPATIBLE_DRIVER);
        free(phy_devs);
        phy_devs = NULL;
        return retval;
    }
    printf("Using GPU device %lu\n", (unsigned long)*idx);
    devs[0].physical_device = phy_devs[*idx];
    vkGetPhysicalDeviceProperties(devs[0].physical_device, &devs[0].properties);
    vkGetPhysicalDeviceFeatures(devs[0].physical_device, &devs[0].features);
    vkGetPhysicalDeviceMemoryProperties(devs[0].physical_device, &devs[0].memories);
    printf("Vulkan GPU - %s: %s (id: 0x%04X) from vendor 0x%04X [driver version: 0x%04X, API version: 0x%04X]\n",
           vk_VkPhysicalDeviceType_string(devs[0].properties.deviceType), devs[0].properties.deviceName,
           devs[0].properties.deviceID, devs[0].properties.vendorID, devs[0].properties.driverVersion, devs[0].properties.apiVersion);
    uint32_t qfc = 0;
    devs[0].queue_family_count = VK_MAX_QUEUE_FAMILY;
    vkGetPhysicalDeviceQueueFamilyProperties(devs[0].physical_device, &qfc, NULL);
    vkGetPhysicalDeviceQueueFamilyProperties(devs[0].physical_device, &devs[0].queue_family_count, devs[0].queue_families);
    devs[0].queue_families_incomplete = devs[0].queue_family_count < qfc;
    free(phy_devs);
    phy_devs = NULL;
    return retval;
}
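
/*
 * Usage sketch (not part of the original file): picking a GPU once the
 * instance and surface exist. `instance` and `surface` are assumed to have
 * been created already; error handling is minimal. Kept inside #if 0 so it
 * does not affect compilation.
 */
#if 0
static void example_pick_gpu(VkInstance instance, VkSurfaceKHR surface)
{
    struct vk_physical_device phy_dev;
    uint32_t gpu_idx = 0;
    // use_idx = false: let vk_enumerate_devices prefer a discrete GPU on its own
    vk_error err = vk_enumerate_devices(instance, &surface, &phy_dev, &gpu_idx, false);
    if (!vk_error_is_success(&err))
        vk_error_printf(&err, "Could not find a usable GPU\n");
}
#endif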
vk_error vk_get_commands(struct vk_physical_device *phy_dev, struct vk_device *dev, VkDeviceQueueCreateInfo queue_info[], uint32_t queue_info_count, uint32_t create_count)
{
    vk_error retval = VK_ERROR_NONE;
    VkResult res;
    bool create_num_cmd = false; // create create_count command buffers in a single queue family
    if (create_count > 0)
    {
        queue_info_count = 1;
        create_num_cmd = true;
    }
    dev->command_pools = malloc(queue_info_count * sizeof *dev->command_pools);
    if (dev->command_pools == NULL)
    {
        vk_error_set_errno(&retval, errno);
        return retval;
    }
    for (uint32_t i = 0; i < queue_info_count; ++i)
    {
        struct vk_commands *cmd = &dev->command_pools[i];
        *cmd = (struct vk_commands){0};
        cmd->qflags = phy_dev->queue_families[queue_info[i].queueFamilyIndex].queueFlags;
        VkCommandPoolCreateInfo pool_info = {
            .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
            .flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
            .queueFamilyIndex = queue_info[i].queueFamilyIndex,
        };
        res = vkCreateCommandPool(dev->device, &pool_info, NULL, &cmd->pool);
        vk_error_set_vkresult(&retval, res);
        if (res < 0)
            return retval;
        ++dev->command_pool_count;
        cmd->queues = malloc(queue_info[i].queueCount * sizeof *cmd->queues);
        if (!create_num_cmd)
            cmd->buffers = malloc(queue_info[i].queueCount * sizeof *cmd->buffers);
        else
            cmd->buffers = malloc(create_count * sizeof *cmd->buffers);
        if (cmd->queues == NULL || cmd->buffers == NULL)
        {
            vk_error_set_errno(&retval, errno);
            return retval;
        }
        for (uint32_t j = 0; j < queue_info[i].queueCount; ++j)
            vkGetDeviceQueue(dev->device, queue_info[i].queueFamilyIndex, j, &cmd->queues[j]);
        cmd->queue_count = queue_info[i].queueCount;
        VkCommandBufferAllocateInfo buffer_info = {
            .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
            .commandPool = cmd->pool,
            .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
            .commandBufferCount = create_num_cmd ? create_count : queue_info[i].queueCount,
        };
        res = vkAllocateCommandBuffers(dev->device, &buffer_info, cmd->buffers);
        vk_error_set_vkresult(&retval, res);
        if (res)
            return retval;
        cmd->buffer_count = create_num_cmd ? create_count : queue_info[i].queueCount;
    }
    return retval;
}
void vk_cleanup(struct vk_device *dev)
{
    vkDeviceWaitIdle(dev->device);
    for (uint32_t i = 0; i < dev->command_pool_count; ++i)
    {
        free(dev->command_pools[i].queues);
        free(dev->command_pools[i].buffers);
        vkDestroyCommandPool(dev->device, dev->command_pools[i].pool, NULL);
    }
    free(dev->command_pools);
    vkDestroyDevice(dev->device, NULL);
    *dev = (struct vk_device){0};
}

vk_error vk_load_shader(struct vk_device *dev, const uint32_t *code, VkShaderModule *shader, size_t size)
{
    vk_error retval = VK_ERROR_NONE;
    VkResult res;
    VkShaderModuleCreateInfo info = {
        .sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
        .codeSize = size,
        .pCode = code,
    };
    res = vkCreateShaderModule(dev->device, &info, NULL, shader);
    vk_error_set_vkresult(&retval, res);
    return retval;
}

#ifdef YARIV_SHADER
vk_error vk_load_shader_yariv(struct vk_device *dev, const uint32_t *yariv_code, VkShaderModule *shader, size_t in_yariv_size)
{
    vk_error retval = VK_ERROR_NONE;
    void *in_yariv = (void *)yariv_code;
    size_t out_spirv_size = yariv_decode_size(in_yariv, in_yariv_size);
    uint32_t *out_spirv = malloc(out_spirv_size);
    yariv_decode(out_spirv, out_spirv_size, in_yariv, in_yariv_size);
    retval = vk_load_shader(dev, out_spirv, shader, out_spirv_size);
    free(out_spirv);
    return retval;
}
#endif
vk_error vk_load_shader_spirv_file(struct vk_device *dev, const char *spirv_file, VkShaderModule *shader)
{
    vk_error retval = VK_ERROR_NONE;
    char *code = NULL;
    size_t size = 0, cur = 0;
    FILE *fin = fopen(spirv_file, "rb");
    *shader = NULL;
    if (fin == NULL)
    {
        vk_error_set_errno(&retval, errno);
        return retval;
    }
    fseek(fin, 0, SEEK_END);
    size = ftell(fin);
    fseek(fin, 0, SEEK_SET);
    code = malloc(size);
    if (code == NULL)
    {
        vk_error_set_errno(&retval, errno);
        fclose(fin);
        return retval;
    }
    while (cur < size)
    {
        size_t read = fread(code + cur, 1, size - cur, fin);
        if (read == 0)
        {
            vk_error_set_errno(&retval, errno);
            free(code);
            fclose(fin);
            return retval;
        }
        cur += read;
    }
    retval = vk_load_shader(dev, (uint32_t *)code, shader, size);
    free(code);
    fclose(fin);
    return retval;
}
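
/*
 * Usage sketch (not part of the original file): loading a SPIR-V shader
 * module from disk. The path "shaders/spv/main.frag.spv" is only an
 * illustration, not a path used by this project.
 */
#if 0
static VkShaderModule example_load_fragment_shader(struct vk_device *dev)
{
    VkShaderModule shader = NULL;
    vk_error err = vk_load_shader_spirv_file(dev, "shaders/spv/main.frag.spv", &shader);
    if (!vk_error_is_success(&err))
        vk_error_printf(&err, "Could not load shader\n");
    return shader; // release later with vk_free_shader(dev, shader)
}
#endif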
void vk_free_shader(struct vk_device *dev, VkShaderModule shader)
{
    vkDestroyShaderModule(dev->device, shader, NULL);
}

uint32_t vk_find_suitable_memory(struct vk_physical_device *phy_dev, struct vk_device *dev,
        VkMemoryRequirements *mem_req, VkMemoryPropertyFlags properties)
{
    for (uint32_t i = 0; i < phy_dev->memories.memoryTypeCount; ++i)
    {
        if ((mem_req->memoryTypeBits & (1 << i)) == 0)
            continue;
        if (phy_dev->memories.memoryHeaps[phy_dev->memories.memoryTypes[i].heapIndex].size < mem_req->size)
            continue;
        if ((phy_dev->memories.memoryTypes[i].propertyFlags & properties) == properties)
            return i;
    }
    // No suitable memory type; callers compare against memoryTypeCount to detect this.
    return phy_dev->memories.memoryTypeCount;
}
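
/*
 * Usage sketch (not part of the original file): allocating device-local
 * memory for an existing VkBuffer via vk_find_suitable_memory. `buf` is
 * assumed to be an already created buffer.
 */
#if 0
static VkDeviceMemory example_alloc_buffer_memory(struct vk_physical_device *phy_dev, struct vk_device *dev, VkBuffer buf)
{
    VkMemoryRequirements mem_req;
    vkGetBufferMemoryRequirements(dev->device, buf, &mem_req);
    uint32_t mem_index = vk_find_suitable_memory(phy_dev, dev, &mem_req, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
    if (mem_index >= phy_dev->memories.memoryTypeCount)
        return NULL; // no compatible memory type
    VkMemoryAllocateInfo mem_info = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .allocationSize = mem_req.size,
        .memoryTypeIndex = mem_index,
    };
    VkDeviceMemory mem = NULL;
    if (vkAllocateMemory(dev->device, &mem_info, NULL, &mem) < 0)
        return NULL;
    vkBindBufferMemory(dev->device, buf, mem, 0);
    return mem;
}
#endif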
vk_error vk_init_ext(VkInstance *vk, const char *ext_names[], uint32_t ext_count)
{
    vk_error retval = VK_ERROR_NONE;
    VkResult res;
    VkApplicationInfo app_info = {
        .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
        .pApplicationName = "Vulkan Shader launcher",
        .applicationVersion = 0x010000,
        .pEngineName = "Vulkan Shader launcher",
        .engineVersion = 0x010000,
        .apiVersion = VK_API_VERSION_1_0,
    };
    VkInstanceCreateInfo info = {
        .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .pNext = NULL,
        .pApplicationInfo = &app_info,
        .enabledLayerCount = 0,
        .ppEnabledLayerNames = NULL,
        .enabledExtensionCount = ext_count,
        .ppEnabledExtensionNames = ext_names,
    };
    res = vkCreateInstance(&info, NULL, vk);
    vk_error_set_vkresult(&retval, res);
    return retval;
}
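
/*
 * Usage sketch (not part of the original file): creating an instance with the
 * surface extensions needed on X11/xcb. The extension-name macros are
 * standard Vulkan; on other platforms the corresponding platform surface
 * extension would be passed instead.
 */
#if 0
static VkInstance example_create_instance(void)
{
    const char *exts[] = { VK_KHR_SURFACE_EXTENSION_NAME, VK_KHR_XCB_SURFACE_EXTENSION_NAME };
    VkInstance instance = NULL;
    vk_error err = vk_init_ext(&instance, exts, 2);
    if (!vk_error_is_success(&err))
        vk_error_printf(&err, "Could not create Vulkan instance\n");
    return instance;
}
#endif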
vk_error vk_get_dev_ext(struct vk_physical_device *phy_dev, struct vk_device *dev, VkQueueFlags qflags,
        VkDeviceQueueCreateInfo queue_info[], uint32_t *queue_info_count,
        const char *ext_names[], uint32_t ext_count)
{
    vk_error retval = VK_ERROR_NONE;
    VkResult res;
    *dev = (struct vk_device){0};
    uint32_t max_queue_count = *queue_info_count;
    *queue_info_count = 0;
    uint32_t max_family_queues = 0;
    for (uint32_t i = 0; i < phy_dev->queue_family_count; ++i)
        if (max_family_queues < phy_dev->queue_families[i].queueCount)
            max_family_queues = phy_dev->queue_families[i].queueCount;
    float *queue_priorities;
    int tsize = 0;
    if (max_family_queues == 0)
        tsize = 1; // avoid a zero-size allocation
    queue_priorities = malloc((max_family_queues + tsize) * sizeof(float));
    memset(queue_priorities, 0, (max_family_queues + tsize) * sizeof(float));
    for (uint32_t i = 0; i < phy_dev->queue_family_count && i < max_queue_count; ++i)
    {
        if ((phy_dev->queue_families[i].queueFlags & qflags) == 0)
            continue;
        queue_info[(*queue_info_count)++] = (VkDeviceQueueCreateInfo){
            .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
            .queueFamilyIndex = i,
            .queueCount = phy_dev->queue_families[i].queueCount,
            .pQueuePriorities = queue_priorities,
        };
    }
    if (*queue_info_count == 0)
    {
        free(queue_priorities);
        queue_priorities = NULL;
        vk_error_set_vkresult(&retval, VK_ERROR_FEATURE_NOT_PRESENT);
        return retval;
    }
    VkDeviceCreateInfo dev_info = {
        .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
        .queueCreateInfoCount = *queue_info_count,
        .pQueueCreateInfos = queue_info,
        .enabledExtensionCount = ext_count,
        .ppEnabledExtensionNames = ext_names,
        .pEnabledFeatures = &phy_dev->features,
    };
    res = vkCreateDevice(phy_dev->physical_device, &dev_info, NULL, &dev->device);
    vk_error_set_vkresult(&retval, res);
    free(queue_priorities);
    queue_priorities = NULL;
    return retval;
}
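
/*
 * Usage sketch (not part of the original file): creating the logical device
 * and then its command pools/buffers with vk_get_dev_ext + vk_get_commands.
 * Sizing queue_info with VK_MAX_QUEUE_FAMILY (from vk_utils.h) is an
 * assumption about how callers use this API; the swapchain extension macro is
 * standard Vulkan.
 */
#if 0
static vk_error example_create_device(struct vk_physical_device *phy_dev, struct vk_device *dev)
{
    VkDeviceQueueCreateInfo queue_info[VK_MAX_QUEUE_FAMILY];
    uint32_t queue_info_count = VK_MAX_QUEUE_FAMILY;
    const char *exts[] = { VK_KHR_SWAPCHAIN_EXTENSION_NAME };
    vk_error err = vk_get_dev_ext(phy_dev, dev, VK_QUEUE_GRAPHICS_BIT, queue_info, &queue_info_count, exts, 1);
    if (!vk_error_is_success(&err))
        return err;
    // create_count = 0: one command buffer per queue; >0 would allocate that many in one family
    return vk_get_commands(phy_dev, dev, queue_info, queue_info_count, 0);
}
#endif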
vk_error vk_create_surface(VkInstance vk, VkSurfaceKHR *surface, struct app_os_window *os_window)
{
    vk_error retval = VK_ERROR_NONE;
    VkResult res;
#if defined(VK_USE_PLATFORM_WIN32_KHR)
    VkWin32SurfaceCreateInfoKHR createInfo;
    createInfo.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
    createInfo.pNext = NULL;
    createInfo.flags = 0;
    createInfo.hinstance = os_window->connection;
    createInfo.hwnd = os_window->window;
    res = vkCreateWin32SurfaceKHR(vk, &createInfo, NULL, surface);
#elif defined(VK_USE_PLATFORM_XCB_KHR)
    VkXcbSurfaceCreateInfoKHR createInfo;
    createInfo.sType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
    createInfo.pNext = NULL;
    createInfo.flags = 0;
    createInfo.connection = os_window->connection;
    createInfo.window = os_window->xcb_window;
    res = vkCreateXcbSurfaceKHR(vk, &createInfo, NULL, surface);
#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
    VkWaylandSurfaceCreateInfoKHR createInfo;
    createInfo.sType = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR;
    createInfo.pNext = NULL;
    createInfo.flags = 0;
    createInfo.display = os_window->display;
    createInfo.surface = os_window->surface;
    res = vkCreateWaylandSurfaceKHR(vk, &createInfo, NULL, surface);
#endif
    vk_error_set_vkresult(&retval, res);
    return retval;
}
vk_error vk_get_swapchain(VkInstance vk, struct vk_physical_device *phy_dev, struct vk_device *dev,
        struct vk_swapchain *swapchain, struct app_os_window *os_window, uint32_t thread_count, VkPresentModeKHR *present_mode)
{
    vk_error retval = VK_ERROR_NONE;
    VkResult res;
    VkSwapchainKHR oldSwapchain = swapchain->swapchain;
    res = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(phy_dev->physical_device, swapchain->surface, &swapchain->surface_caps);
    vk_error_set_vkresult(&retval, res);
    if (res)
        return retval;
    // One extra image per additional rendering thread, clamped to the surface maximum.
    uint32_t image_count = swapchain->surface_caps.minImageCount + thread_count - 1;
    if (swapchain->surface_caps.maxImageCount < image_count && swapchain->surface_caps.maxImageCount != 0)
        image_count = swapchain->surface_caps.maxImageCount;
    uint32_t surface_format_count = 1;
    res = vkGetPhysicalDeviceSurfaceFormatsKHR(phy_dev->physical_device, swapchain->surface, &surface_format_count, NULL);
    vk_error_set_vkresult(&retval, res);
    if (res < 0)
        return retval;
    if (surface_format_count < 1) {
        retval.error.type = VK_ERROR_ERRNO;
        vk_error_printf(&retval, "surface_format_count < 1\n");
        return retval;
    }
    VkSurfaceFormatKHR surface_format[184];
    if (surface_format_count >= 184) // clamp to the fixed-size local array
        surface_format_count = 184 - 1;
    res = vkGetPhysicalDeviceSurfaceFormatsKHR(phy_dev->physical_device, swapchain->surface, &surface_format_count, &surface_format[0]);
    vk_error_set_vkresult(&retval, res);
    if (res < 0)
        return retval;
    swapchain->surface_format = surface_format[0];
    // Prefer a linear (UNORM) format, then an sRGB one, over whatever comes first.
    if (surface_format_count > 1) {
        uint32_t supported_format_srgb = VK_FORMAT_B8G8R8A8_SRGB;
        int found_srgb = -1;
        uint32_t supported_format_linear = VK_FORMAT_B8G8R8A8_UNORM;
        int found_linear = -1;
        for (uint32_t i = 0; i < surface_format_count; i++) {
            if (surface_format[i].format == supported_format_srgb)
                found_srgb = i;
            if (surface_format[i].format == supported_format_linear)
                found_linear = i;
        }
        if (found_linear >= 0)
            swapchain->surface_format = surface_format[found_linear];
        else if (found_srgb >= 0)
            swapchain->surface_format = surface_format[found_srgb];
    }
    // VK_FORMAT_UNDEFINED means the surface has no preferred format.
    if (surface_format_count == 1 && swapchain->surface_format.format == VK_FORMAT_UNDEFINED)
        swapchain->surface_format.format = VK_FORMAT_B8G8R8A8_UNORM;
    swapchain->present_modes_count = VK_MAX_PRESENT_MODES;
    res = vkGetPhysicalDeviceSurfacePresentModesKHR(phy_dev->physical_device, swapchain->surface,
            &swapchain->present_modes_count, swapchain->present_modes);
    vk_error_set_vkresult(&retval, res);
    if (res >= 0)
    {
        bool found = false;
        for (uint32_t i = 0; i < swapchain->present_modes_count; ++i)
        {
            if (swapchain->present_modes[i] == *present_mode)
            {
                found = true;
                break;
            }
        }
        // Requested mode unavailable: fall back to FIFO, then MAILBOX, then IMMEDIATE.
        if (!found) {
            bool has_fifo = false, has_mailbox = false, has_immediate = false;
            for (uint32_t i = 0; i < swapchain->present_modes_count; ++i)
            {
                if (swapchain->present_modes[i] == VK_PRESENT_MODE_IMMEDIATE_KHR)
                    has_immediate = true;
                if (swapchain->present_modes[i] == VK_PRESENT_MODE_MAILBOX_KHR)
                    has_mailbox = true;
                if (swapchain->present_modes[i] == VK_PRESENT_MODE_FIFO_KHR)
                    has_fifo = true;
            }
            if (has_fifo)
                *present_mode = VK_PRESENT_MODE_FIFO_KHR;
            else if (has_mailbox)
                *present_mode = VK_PRESENT_MODE_MAILBOX_KHR;
            else if (has_immediate)
                *present_mode = VK_PRESENT_MODE_IMMEDIATE_KHR;
        }
    }
    VkExtent2D swapchainExtent;
    VkImageFormatProperties format_properties;
    res = vkGetPhysicalDeviceImageFormatProperties(phy_dev->physical_device, swapchain->surface_format.format, VK_IMAGE_TYPE_2D,
            VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, 0, &format_properties);
    if ((res == VK_SUCCESS
         && format_properties.maxExtent.width >= swapchain->surface_caps.currentExtent.width
         && format_properties.maxExtent.height >= swapchain->surface_caps.currentExtent.height)
        || (swapchain->surface_caps.currentExtent.width == 0xFFFFFFFF))
    {
        if (swapchain->surface_caps.currentExtent.width == 0xFFFFFFFF) {
            // Surface size is undefined: use the window resolution, clamped to the allowed extent.
            swapchainExtent.width = os_window->app_data.iResolution[0];
            swapchainExtent.height = os_window->app_data.iResolution[1];
            if (swapchainExtent.width < swapchain->surface_caps.minImageExtent.width) {
                swapchainExtent.width = swapchain->surface_caps.minImageExtent.width;
            }
            else if (swapchainExtent.width > swapchain->surface_caps.maxImageExtent.width) {
                swapchainExtent.width = swapchain->surface_caps.maxImageExtent.width;
            }
            if (swapchainExtent.height < swapchain->surface_caps.minImageExtent.height) {
                swapchainExtent.height = swapchain->surface_caps.minImageExtent.height;
            }
            else if (swapchainExtent.height > swapchain->surface_caps.maxImageExtent.height) {
                swapchainExtent.height = swapchain->surface_caps.maxImageExtent.height;
            }
        }
        else {
            swapchainExtent = swapchain->surface_caps.currentExtent;
            os_window->app_data.iResolution[0] = swapchain->surface_caps.currentExtent.width;
            os_window->app_data.iResolution[1] = swapchain->surface_caps.currentExtent.height;
        }
    }
    else {
        printf("Error: resolution too large; currentExtent width, height: %lu, %lu; iResolution.xy: %lu, %lu; maxExtent width, height: %lu, %lu\n",
               (unsigned long)swapchain->surface_caps.currentExtent.width, (unsigned long)swapchain->surface_caps.currentExtent.height,
               (unsigned long)os_window->app_data.iResolution[0], (unsigned long)os_window->app_data.iResolution[1],
               (unsigned long)format_properties.maxExtent.width, (unsigned long)format_properties.maxExtent.height);
        os_window->app_data.iResolution[0] = swapchain->surface_caps.currentExtent.width;
        os_window->app_data.iResolution[1] = swapchain->surface_caps.currentExtent.height;
        if (format_properties.maxExtent.width < swapchain->surface_caps.currentExtent.width)
            os_window->app_data.iResolution[0] = format_properties.maxExtent.width;
        if (format_properties.maxExtent.height < swapchain->surface_caps.currentExtent.height)
            os_window->app_data.iResolution[1] = format_properties.maxExtent.height;
        swapchainExtent.width = os_window->app_data.iResolution[0];
        swapchainExtent.height = os_window->app_data.iResolution[1];
    }
    if (os_window->app_data.iResolution[0] <= 0 || os_window->app_data.iResolution[1] <= 0) {
        os_window->is_minimized = true;
        return VK_ERROR_NONE;
    }
    else {
        os_window->is_minimized = false;
    }
    VkSwapchainCreateInfoKHR swapchain_info = {
        .sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
        .pNext = NULL,
        .flags = 0, // bug https://github.com/KhronosGroup/Vulkan-ValidationLayers/issues/4274
        .surface = swapchain->surface,
        .minImageCount = image_count,
        .imageFormat = swapchain->surface_format.format,
        .imageColorSpace = swapchain->surface_format.colorSpace,
        .imageExtent =
        {
            .width = swapchainExtent.width,
            .height = swapchainExtent.height,
        },
        .imageArrayLayers = 1,
        .imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
        .imageSharingMode = VK_SHARING_MODE_EXCLUSIVE,
        .preTransform = swapchain->surface_caps.currentTransform,
        .compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
        .presentMode = *present_mode,
        .oldSwapchain = oldSwapchain,
        .clipped = true,
    };
    if (swapchain->surface_caps.supportedUsageFlags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) {
        swapchain_info.imageUsage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
    }
    uint32_t *presentable_queues = NULL;
    uint32_t presentable_queue_count = 0;
    retval = vk_get_presentable_queues(phy_dev, dev, swapchain->surface, &presentable_queues, &presentable_queue_count);
    if (!vk_error_is_success(&retval) || presentable_queue_count == 0)
        return retval;
    free(presentable_queues);
    res = vkCreateSwapchainKHR(dev->device, &swapchain_info, NULL, &swapchain->swapchain);
    vk_error_set_vkresult(&retval, res);
    if (oldSwapchain != VK_NULL_HANDLE) {
        vkDestroySwapchainKHR(dev->device, oldSwapchain, NULL);
    }
    return retval;
}
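
/*
 * Usage sketch (not part of the original file): creating and re-creating the
 * swapchain on window resize. swapchain->swapchain is assumed to be
 * VK_NULL_HANDLE on first use; on re-creation the old swapchain is destroyed
 * by vk_get_swapchain itself.
 */
#if 0
static void example_recreate_swapchain(VkInstance vk, struct vk_physical_device *phy_dev, struct vk_device *dev,
        struct vk_swapchain *swapchain, struct app_os_window *os_window)
{
    VkPresentModeKHR present_mode = VK_PRESENT_MODE_FIFO_KHR; // falls back automatically if unsupported
    vk_error err = vk_get_swapchain(vk, phy_dev, dev, swapchain, os_window, 1, &present_mode);
    if (!vk_error_is_success(&err))
        vk_error_printf(&err, "Could not (re)create swapchain\n");
}
#endif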
void vk_free_swapchain(VkInstance vk, struct vk_device *dev, struct vk_swapchain *swapchain)
{
    vkDestroySwapchainKHR(dev->device, swapchain->swapchain, NULL);
    vkDestroySurfaceKHR(vk, swapchain->surface, NULL);
    *swapchain = (struct vk_swapchain){0};
}

VkImage *vk_get_swapchain_images(struct vk_device *dev, struct vk_swapchain *swapchain, uint32_t *count)
{
    vk_error retval = VK_ERROR_NONE;
    VkResult res;
    uint32_t image_count;
    res = vkGetSwapchainImagesKHR(dev->device, swapchain->swapchain, &image_count, NULL);
    vk_error_set_vkresult(&retval, res);
    if (res < 0)
    {
        vk_error_printf(&retval, "Failed to count the number of images in swapchain\n");
        return NULL;
    }
    VkImage *images = malloc(image_count * sizeof *images);
    if (images == NULL)
    {
        printf("Out of memory\n");
        return NULL;
    }
    res = vkGetSwapchainImagesKHR(dev->device, swapchain->swapchain, &image_count, images);
    vk_error_set_vkresult(&retval, res);
    if (res < 0)
    {
        vk_error_printf(&retval, "Failed to get the images in swapchain\n");
        free(images);
        return NULL;
    }
    if (count)
        *count = image_count;
    return images;
}
vk_error vk_create_images(struct vk_physical_device *phy_dev, struct vk_device *dev,
        struct vk_image *images, uint32_t image_count)
{
    uint32_t successful = 0;
    vk_error retval = VK_ERROR_NONE;
    VkResult res;
    for (uint32_t i = 0; i < image_count; ++i)
    {
        images[i].image = NULL;
        images[i].image_mem = NULL;
        images[i].view = NULL;
        images[i].sampler = NULL;
        VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;
        VkSampleCountFlagBits samples = VK_SAMPLE_COUNT_1_BIT;
        VkImageLayout layout = VK_IMAGE_LAYOUT_UNDEFINED;
        if (images[i].will_be_initialized || images[i].host_visible)
        {
            // Host-accessible images: restrict usage to transfers and use linear tiling.
            images[i].usage &= VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
            layout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            tiling = VK_IMAGE_TILING_LINEAR;
        }
        else if (images[i].multisample)
        {
            // Pick the highest supported sample count, up to 16x.
            VkImageFormatProperties format_properties;
            res = vkGetPhysicalDeviceImageFormatProperties(phy_dev->physical_device, images[i].format, VK_IMAGE_TYPE_2D,
                    tiling, images[i].usage, 0, &format_properties);
            vk_error_sub_set_vkresult(&retval, res);
            if (res == 0)
            {
                for (uint32_t s = VK_SAMPLE_COUNT_16_BIT; s != 0; s >>= 1)
                    if ((format_properties.sampleCounts & s))
                    {
                        samples = s;
                        break;
                    }
            }
        }
        uint32_t mipLevels = 1;
        if (images[i].mipmaps)
            mipLevels = (int)(log(MAX(images[i].extent.width, images[i].extent.height)) / log(2)) + 1;
        bool shared = images[i].sharing_queue_count > 1;
        struct VkImageCreateInfo image_info = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
            .imageType = VK_IMAGE_TYPE_2D,
            .format = images[i].format,
            .extent = {images[i].extent.width, images[i].extent.height, 1},
            .mipLevels = mipLevels,
            .arrayLayers = 1,
            .samples = samples,
            .tiling = tiling,
            .usage = images[i].usage,
            .sharingMode = shared ? VK_SHARING_MODE_CONCURRENT : VK_SHARING_MODE_EXCLUSIVE,
            .queueFamilyIndexCount = shared ? images[i].sharing_queue_count : 0,
            .pQueueFamilyIndices = shared ? images[i].sharing_queues : NULL,
            .initialLayout = layout,
        };
        res = vkCreateImage(dev->device, &image_info, NULL, &images[i].image);
        vk_error_sub_set_vkresult(&retval, res);
        if (res)
            continue;
        VkMemoryRequirements mem_req = {0};
        vkGetImageMemoryRequirements(dev->device, images[i].image, &mem_req);
        uint32_t mem_index = vk_find_suitable_memory(phy_dev, dev, &mem_req,
                images[i].host_visible ?
                VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT :
                VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
        if (mem_index >= phy_dev->memories.memoryTypeCount)
            continue;
        VkMemoryAllocateInfo mem_info = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
            .allocationSize = mem_req.size,
            .memoryTypeIndex = mem_index,
        };
        res = vkAllocateMemory(dev->device, &mem_info, NULL, &images[i].image_mem);
        vk_error_sub_set_vkresult(&retval, res);
        if (res)
            continue;
        res = vkBindImageMemory(dev->device, images[i].image, images[i].image_mem, 0);
        vk_error_sub_set_vkresult(&retval, res);
        if (res)
            continue;
        if (images[i].make_view)
        {
            VkImageViewCreateInfo view_info = {
                .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
                .image = images[i].image,
                .viewType = VK_IMAGE_VIEW_TYPE_2D,
                .format = images[i].format,
                .components = {VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A},
                .subresourceRange = {
                    .aspectMask = (images[i].usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) == 0 ?
                            VK_IMAGE_ASPECT_COLOR_BIT :
                            VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT,
                    .baseMipLevel = 0,
                    .levelCount = VK_REMAINING_MIP_LEVELS,
                    .baseArrayLayer = 0,
                    .layerCount = VK_REMAINING_ARRAY_LAYERS,
                },
            };
            res = vkCreateImageView(dev->device, &view_info, NULL, &images[i].view);
            vk_error_sub_set_vkresult(&retval, res);
            if (res)
                continue;
        }
        if ((images[i].usage & VK_IMAGE_USAGE_SAMPLED_BIT))
        {
            VkSamplerCreateInfo sampler_info = {
                .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
                .magFilter = images[i].linear ? VK_FILTER_LINEAR : VK_FILTER_NEAREST,
                .minFilter = images[i].linear ? VK_FILTER_LINEAR : VK_FILTER_NEAREST,
                .mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR,
                .addressModeU = images[i].repeat_mode,
                .addressModeV = images[i].repeat_mode,
                .addressModeW = images[i].repeat_mode,
                .anisotropyEnable = images[i].anisotropyEnable && phy_dev->features.samplerAnisotropy,
                .maxAnisotropy = phy_dev->properties.limits.maxSamplerAnisotropy,
                .minLod = 0,
                .maxLod = 1,
            };
            if (images[i].mipmaps)
            {
                sampler_info.maxLod = mipLevels;
                sampler_info.mipLodBias = 0;
                sampler_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
                sampler_info.compareOp = VK_COMPARE_OP_ALWAYS;
            }
            res = vkCreateSampler(dev->device, &sampler_info, NULL, &images[i].sampler);
            vk_error_sub_set_vkresult(&retval, res);
            if (res)
                continue;
        }
        ++successful;
    }
    vk_error_set_vkresult(&retval, successful == image_count ? VK_SUCCESS : VK_INCOMPLETE);
    return retval;
}
vk_error vk_create_buffers(struct vk_physical_device *phy_dev, struct vk_device *dev,
        struct vk_buffer *buffers, uint32_t buffer_count)
{
    uint32_t successful = 0;
    vk_error retval = VK_ERROR_NONE;
    VkResult res;
    for (uint32_t i = 0; i < buffer_count; ++i)
    {
        buffers[i].buffer = NULL;
        buffers[i].buffer_mem = NULL;
        buffers[i].view = NULL;
        bool shared = buffers[i].sharing_queue_count > 1;
        VkBufferCreateInfo buffer_info = {
            .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
            .size = buffers[i].size * sizeof(float), // size is counted in floats, not bytes
            .usage = buffers[i].usage,
            .sharingMode = shared ? VK_SHARING_MODE_CONCURRENT : VK_SHARING_MODE_EXCLUSIVE,
            .queueFamilyIndexCount = shared ? buffers[i].sharing_queue_count : 0,
            .pQueueFamilyIndices = shared ? buffers[i].sharing_queues : NULL,
        };
        res = vkCreateBuffer(dev->device, &buffer_info, NULL, &buffers[i].buffer);
        vk_error_sub_set_vkresult(&retval, res);
        if (res)
            continue;
        VkMemoryRequirements mem_req = {0};
        vkGetBufferMemoryRequirements(dev->device, buffers[i].buffer, &mem_req);
        uint32_t mem_index = vk_find_suitable_memory(phy_dev, dev, &mem_req,
                buffers[i].host_visible ?
                VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT :
                VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
        if (mem_index >= phy_dev->memories.memoryTypeCount)
            continue;
        VkMemoryAllocateInfo mem_info = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
            .allocationSize = mem_req.size,
            .memoryTypeIndex = mem_index,
        };
        res = vkAllocateMemory(dev->device, &mem_info, NULL, &buffers[i].buffer_mem);
        vk_error_sub_set_vkresult(&retval, res);
        if (res)
            continue;
        res = vkBindBufferMemory(dev->device, buffers[i].buffer, buffers[i].buffer_mem, 0);
        vk_error_sub_set_vkresult(&retval, res);
        if (res)
            continue;
        if (buffers[i].make_view)
        {
            // Buffer views only apply to texel buffers.
            if ((buffers[i].usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) || (buffers[i].usage & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT))
            {
                VkBufferViewCreateInfo view_info = {
                    .sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
                    .buffer = buffers[i].buffer,
                    .format = buffers[i].format,
                    .offset = 0,
                    .range = VK_WHOLE_SIZE,
                };
                res = vkCreateBufferView(dev->device, &view_info, NULL, &buffers[i].view);
                vk_error_sub_set_vkresult(&retval, res);
                if (res)
                    continue;
            }
        }
        ++successful;
    }
    vk_error_set_vkresult(&retval, successful == buffer_count ? VK_SUCCESS : VK_INCOMPLETE);
    return retval;
}
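
/*
 * Usage sketch (not part of the original file): creating one host-visible
 * uniform buffer with vk_create_buffers. The struct vk_buffer fields used
 * here (size, usage, host_visible, stage) match how this file reads them;
 * note that size is counted in floats.
 */
#if 0
static vk_error example_create_uniform_buffer(struct vk_physical_device *phy_dev, struct vk_device *dev, struct vk_buffer *buf)
{
    *buf = (struct vk_buffer){
        .size = 4, // 4 floats = 16 bytes
        .usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
        .host_visible = true,
        .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
    };
    return vk_create_buffers(phy_dev, dev, buf, 1);
}
#endif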
vk_error vk_load_shaders(struct vk_device *dev,
        struct vk_shader *shaders, uint32_t shader_count)
{
    uint32_t successful = 0;
    vk_error retval = VK_ERROR_NONE;
    vk_error err;
    for (uint32_t i = 0; i < shader_count; ++i)
    {
        err = vk_load_shader_spirv_file(dev, shaders[i].spirv_file, &shaders[i].shader);
        vk_error_sub_merge(&retval, &err);
        if (!vk_error_is_success(&err))
            continue;
        ++successful;
    }
    vk_error_set_vkresult(&retval, successful == shader_count ? VK_SUCCESS : VK_INCOMPLETE);
    return retval;
}

vk_error vk_get_presentable_queues(struct vk_physical_device *phy_dev, struct vk_device *dev,
        VkSurfaceKHR surface, uint32_t **presentable_queues, uint32_t *presentable_queue_count)
{
    vk_error retval = VK_ERROR_NONE;
    VkResult res;
    *presentable_queues = malloc(dev->command_pool_count * sizeof **presentable_queues);
    if (*presentable_queues == NULL)
    {
        vk_error_set_errno(&retval, errno);
        return retval;
    }
    *presentable_queue_count = 0;
    // The pool index is used as a queue family index here, which matches how
    // vk_get_dev_ext/vk_get_commands create one pool per selected family in order.
    for (uint32_t i = 0; i < dev->command_pool_count; ++i)
    {
        VkBool32 supports = false;
        res = vkGetPhysicalDeviceSurfaceSupportKHR(phy_dev->physical_device, i, surface, &supports);
        vk_error_sub_set_vkresult(&retval, res);
        if (res || !supports)
            continue;
        (*presentable_queues)[(*presentable_queue_count)++] = i;
    }
    if (*presentable_queue_count == 0)
    {
        free(*presentable_queues);
        *presentable_queues = NULL;
    }
    vk_error_set_vkresult(&retval, *presentable_queue_count == 0 ? VK_ERROR_INCOMPATIBLE_DRIVER : VK_SUCCESS);
    return retval;
}
VkFormat vk_get_supported_depth_stencil_format(struct vk_physical_device *phy_dev)
{
    VkFormat depth_formats[] = {
        VK_FORMAT_D32_SFLOAT_S8_UINT,
        VK_FORMAT_D32_SFLOAT,
        VK_FORMAT_D24_UNORM_S8_UINT,
        VK_FORMAT_X8_D24_UNORM_PACK32,
        VK_FORMAT_D16_UNORM,
    };
    VkFormat selected_format = VK_FORMAT_UNDEFINED;
    for (size_t i = 0; i < sizeof depth_formats / sizeof *depth_formats; ++i)
    {
        VkFormatProperties format_properties;
        vkGetPhysicalDeviceFormatProperties(phy_dev->physical_device, depth_formats[i], &format_properties);
        if ((format_properties.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))
        {
            selected_format = depth_formats[i];
            break;
        }
    }
    return selected_format;
}
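
/*
 * Usage sketch (not part of the original file): the format list above is
 * ordered by preference, so the call returns the best depth/stencil format
 * the GPU supports for optimal tiling, or VK_FORMAT_UNDEFINED if none.
 */
#if 0
static void example_query_depth_format(struct vk_physical_device *phy_dev)
{
    VkFormat depth_format = vk_get_supported_depth_stencil_format(phy_dev);
    if (depth_format == VK_FORMAT_UNDEFINED)
        printf("No supported depth/stencil format found\n");
}
#endif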
void vk_free_images(struct vk_device *dev, struct vk_image *images, uint32_t image_count)
{
    vkDeviceWaitIdle(dev->device);
    for (uint32_t i = 0; i < image_count; ++i)
    {
        if (images[i].view)
            vkDestroyImageView(dev->device, images[i].view, NULL);
        vkDestroyImage(dev->device, images[i].image, NULL);
        vkFreeMemory(dev->device, images[i].image_mem, NULL);
        if (images[i].sampler)
            vkDestroySampler(dev->device, images[i].sampler, NULL);
    }
}

void vk_free_buffers(struct vk_device *dev, struct vk_buffer *buffers, uint32_t buffer_count)
{
    vkDeviceWaitIdle(dev->device);
    for (uint32_t i = 0; i < buffer_count; ++i)
    {
        vkDestroyBufferView(dev->device, buffers[i].view, NULL);
        vkDestroyBuffer(dev->device, buffers[i].buffer, NULL);
        vkFreeMemory(dev->device, buffers[i].buffer_mem, NULL);
    }
}

void vk_free_shaders(struct vk_device *dev, struct vk_shader *shaders, uint32_t shader_count)
{
    vkDeviceWaitIdle(dev->device);
    for (uint32_t i = 0; i < shader_count; ++i)
        vk_free_shader(dev, shaders[i].shader);
}

void vk_free_graphics_buffers(struct vk_device *dev, struct vk_graphics_buffers *graphics_buffers, uint32_t graphics_buffer_count,
        VkRenderPass render_pass)
{
    vkDeviceWaitIdle(dev->device);
    for (uint32_t i = 0; i < graphics_buffer_count; ++i)
    {
        vk_free_images(dev, &graphics_buffers[i].depth, 1);
        vkDestroyImageView(dev->device, graphics_buffers[i].color_view, NULL);
        vkDestroyFramebuffer(dev->device, graphics_buffers[i].framebuffer, NULL);
    }
    vkDestroyRenderPass(dev->device, render_pass, NULL);
}
vk_error vk_make_graphics_layouts(struct vk_device *dev, struct vk_layout *layouts, uint32_t layout_count,
        bool w_img_pattern, uint32_t *img_pattern, uint32_t img_pattern_size)
{
    uint32_t successful = 0;
    vk_error retval = VK_ERROR_NONE;
    VkResult res;
    for (uint32_t i = 0; i < layout_count; ++i)
    {
        struct vk_layout *layout = &layouts[i];
        struct vk_resources *resources = layout->resources;
        layout->set_layout = NULL;
        layout->pipeline_layout = NULL;
        VkDescriptorSetLayoutBinding *set_layout_bindings;
        if (w_img_pattern)
            set_layout_bindings = malloc((img_pattern_size + resources->buffer_count) * sizeof(VkDescriptorSetLayoutBinding));
        else
            set_layout_bindings = malloc((resources->image_count + resources->buffer_count) * sizeof(VkDescriptorSetLayoutBinding));
        uint32_t binding_count = 0;
        uint32_t tidx = 0;
        // With w_img_pattern, img_pattern[j] images share one array binding;
        // tidx accumulates the flat image index for each pattern entry.
        for (uint32_t j = 0; j < (w_img_pattern ? img_pattern_size : resources->image_count); ++j)
        {
            if ((resources->images[w_img_pattern ? tidx : j].usage & (VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_SAMPLED_BIT)) == 0)
                continue;
            tidx = 0;
            if (w_img_pattern)
                for (uint32_t tj = 0; tj < j; tj++)
                    tidx += img_pattern[tj];
            set_layout_bindings[binding_count] = (VkDescriptorSetLayoutBinding){
                .binding = w_img_pattern ? tidx : binding_count,
                .descriptorType = resources->images[w_img_pattern ? tidx : j].usage & VK_IMAGE_USAGE_SAMPLED_BIT ?
                        VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER :
                        VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
                .descriptorCount = w_img_pattern ? img_pattern[j] : 1,
                .stageFlags = resources->images[j].stage,
            };
            ++binding_count;
        }
        for (uint32_t j = 0; j < resources->buffer_count; ++j)
        {
            if ((resources->buffers[j].usage & (VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)) == 0)
                continue;
            if (w_img_pattern)
                tidx++;
            set_layout_bindings[binding_count] = (VkDescriptorSetLayoutBinding){
                .binding = w_img_pattern ? tidx : binding_count,
                .descriptorType = resources->buffers[j].usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT ?
                        VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER :
                        VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
                .descriptorCount = 1,
                .stageFlags = resources->buffers[j].stage,
            };
            ++binding_count;
        }
        VkDescriptorSetLayoutCreateInfo set_layout_info = {
            .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
            .bindingCount = binding_count,
            .pBindings = set_layout_bindings,
        };
        res = vkCreateDescriptorSetLayout(dev->device, &set_layout_info, NULL, &layout->set_layout);
        vk_error_sub_set_vkresult(&retval, res);
        if (res) {
            free(set_layout_bindings);
            set_layout_bindings = NULL;
            continue;
        }
        VkPipelineLayoutCreateInfo pipeline_layout_info = {
            .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
            .setLayoutCount = 1,
            .pSetLayouts = &layout->set_layout,
            .pushConstantRangeCount = resources->push_constant_count,
            .pPushConstantRanges = resources->push_constants,
        };
        res = vkCreatePipelineLayout(dev->device, &pipeline_layout_info, NULL, &layout->pipeline_layout);
        vk_error_sub_set_vkresult(&retval, res);
        if (res) {
            free(set_layout_bindings);
            set_layout_bindings = NULL;
            continue;
        }
        free(set_layout_bindings);
        set_layout_bindings = NULL;
        ++successful;
    }
    vk_error_set_vkresult(&retval, successful == layout_count ? VK_SUCCESS : VK_INCOMPLETE);
    return retval;
}
vk_error vk_make_graphics_pipelines(struct vk_device *dev, struct vk_pipeline *pipelines, uint32_t pipeline_count, bool is_blend)
{
    uint32_t successful = 0;
    vk_error retval = VK_ERROR_NONE;
    VkResult res;
    for (uint32_t i = 0; i < pipeline_count; ++i)
    {
        struct vk_pipeline *pipeline = &pipelines[i];
        struct vk_layout *layout = pipeline->layout;
        struct vk_resources *resources = layout->resources;
        pipeline->pipeline = NULL;
        pipeline->set_pool = NULL;
        VkGraphicsPipelineCreateInfo pipeline_info;
        bool has_tessellation_shader = false;
        VkPipelineShaderStageCreateInfo *stage_info;
        stage_info = malloc(resources->shader_count * sizeof(VkPipelineShaderStageCreateInfo));
        for (uint32_t j = 0; j < resources->shader_count; ++j)
        {
            struct vk_shader *shader = &resources->shaders[j];
            stage_info[j] = (VkPipelineShaderStageCreateInfo){
                .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
                .stage = shader->stage,
                .module = shader->shader,
                .pName = "main",
            };
            if (shader->stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || shader->stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
                has_tessellation_shader = true;
        }
        VkPipelineViewportStateCreateInfo viewport_state = {
            .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
            .viewportCount = 1,
            .scissorCount = 1,
        };
        VkPipelineRasterizationStateCreateInfo rasterization_state = {
            .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
            .polygonMode = VK_POLYGON_MODE_FILL,
            .cullMode = VK_CULL_MODE_BACK_BIT,
            .frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE,
            .lineWidth = 1,
        };
        VkPipelineMultisampleStateCreateInfo multisample_state = {
            .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
            .rasterizationSamples = VK_SAMPLE_COUNT_1_BIT,
        };
        VkPipelineDepthStencilStateCreateInfo depth_stencil_state = {
            .sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
            .depthTestEnable = true,
            .depthWriteEnable = true,
            .depthCompareOp = VK_COMPARE_OP_GREATER_OR_EQUAL,
        };
        VkPipelineColorBlendAttachmentState color_blend_attachments[1] = {
            [0] = {
                .blendEnable = is_blend,
                .colorWriteMask = VK_COLOR_COMPONENT_R_BIT
                        | VK_COLOR_COMPONENT_G_BIT
                        | VK_COLOR_COMPONENT_B_BIT
                        | VK_COLOR_COMPONENT_A_BIT,
            },
        };
        if (is_blend) {
            // Standard alpha blending: src*alpha + dst*(1-alpha).
            color_blend_attachments[0].srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
            color_blend_attachments[0].dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
            color_blend_attachments[0].colorBlendOp = VK_BLEND_OP_ADD;
            color_blend_attachments[0].srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE;
            color_blend_attachments[0].dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO;
            color_blend_attachments[0].alphaBlendOp = VK_BLEND_OP_ADD;
        }
        VkPipelineColorBlendStateCreateInfo color_blend_state = {
            .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
            .attachmentCount = 1,
            .pAttachments = color_blend_attachments,
        };
        VkDynamicState dynamic_states[2] = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR };
        VkPipelineDynamicStateCreateInfo dynamic_state = {
            .sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
            .dynamicStateCount = 2,
            .pDynamicStates = dynamic_states,
        };
        pipeline_info = (VkGraphicsPipelineCreateInfo){
            .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
            .flags = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
            .stageCount = resources->shader_count,
            .pStages = stage_info,
            .pVertexInputState = &pipeline->vertex_input_state,
            .pInputAssemblyState = &pipeline->input_assembly_state,
            .pTessellationState = has_tessellation_shader ? &pipeline->tessellation_state : NULL,
            .pViewportState = &viewport_state,
            .pRasterizationState = &rasterization_state,
            .pMultisampleState = &multisample_state,
            .pDepthStencilState = &depth_stencil_state,
            .pColorBlendState = &color_blend_state,
            .pDynamicState = &dynamic_state,
            .layout = layout->pipeline_layout,
            .renderPass = resources->render_pass,
            .subpass = 0,
            .basePipelineIndex = 0,
        };
        res = vkCreateGraphicsPipelines(dev->device, NULL, 1, &pipeline_info, NULL, &pipeline->pipeline);
        vk_error_sub_set_vkresult(&retval, res);
        if (res) {
            free(stage_info);
            stage_info = NULL;
            continue;
        }
        // Count descriptors of each type to size the descriptor pool.
        uint32_t image_sampler_count = 0;
        uint32_t storage_image_count = 0;
        uint32_t uniform_buffer_count = 0;
        uint32_t storage_buffer_count = 0;
        for (uint32_t j = 0; j < resources->image_count; ++j)
        {
            if ((resources->images[j].usage & VK_IMAGE_USAGE_SAMPLED_BIT))
                ++image_sampler_count;
            else if ((resources->images[j].usage & VK_IMAGE_USAGE_STORAGE_BIT))
                ++storage_image_count;
        }
        for (uint32_t j = 0; j < resources->buffer_count; ++j)
        {
            if ((resources->buffers[j].usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT))
                ++uniform_buffer_count;
            else if ((resources->buffers[j].usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT))
                ++storage_buffer_count;
        }
        uint32_t pool_size_count = 0;
        VkDescriptorPoolSize pool_sizes[4];
        if (image_sampler_count > 0)
            pool_sizes[pool_size_count++] = (VkDescriptorPoolSize){
                .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                .descriptorCount = pipeline->thread_count * image_sampler_count,
            };
        if (storage_image_count > 0)
            pool_sizes[pool_size_count++] = (VkDescriptorPoolSize){
                .type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
                .descriptorCount = pipeline->thread_count * storage_image_count,
            };
        if (uniform_buffer_count > 0)
            pool_sizes[pool_size_count++] = (VkDescriptorPoolSize){
                .type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
                .descriptorCount = pipeline->thread_count * uniform_buffer_count,
            };
        if (storage_buffer_count > 0)
            pool_sizes[pool_size_count++] = (VkDescriptorPoolSize){
                .type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
                .descriptorCount = pipeline->thread_count * storage_buffer_count,
            };
        VkDescriptorPoolCreateInfo set_info = {
            .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
            .maxSets = pipeline->thread_count,
            .poolSizeCount = pool_size_count,
            .pPoolSizes = pool_sizes,
        };
        res = vkCreateDescriptorPool(dev->device, &set_info, NULL, &pipeline->set_pool);
        vk_error_sub_set_vkresult(&retval, res);
        if (res) {
            free(stage_info);
            stage_info = NULL;
            continue;
        }
        free(stage_info);
        stage_info = NULL;
        ++successful;
    }
    vk_error_set_vkresult(&retval, successful == pipeline_count ? VK_SUCCESS : VK_INCOMPLETE);
    return retval;
}
void vk_free_layouts(struct vk_device *dev, struct vk_layout *layouts, uint32_t layout_count)
{
    vkDeviceWaitIdle(dev->device);
    for (uint32_t i = 0; i < layout_count; ++i)
    {
        vkDestroyPipelineLayout(dev->device, layouts[i].pipeline_layout, NULL);
        vkDestroyDescriptorSetLayout(dev->device, layouts[i].set_layout, NULL);
    }
}

void vk_free_pipelines(struct vk_device *dev, struct vk_pipeline *pipelines, uint32_t pipeline_count)
{
    vkDeviceWaitIdle(dev->device);
    for (uint32_t i = 0; i < pipeline_count; ++i)
    {
        vkDestroyPipeline(dev->device, pipelines[i].pipeline, NULL);
        vkDestroyDescriptorPool(dev->device, pipelines[i].set_pool, NULL);
    }
}
static vk_error create_render_pass(struct vk_device *dev, VkFormat color_format, VkFormat depth_format, VkRenderPass *render_pass,
        enum vk_render_pass_load_op keeps_contents, enum vk_make_depth_buffer has_depth)
{
    vk_error retval = VK_ERROR_NONE;
    VkResult res;
    VkAttachmentDescription render_pass_attachments[2] = {
        [0] = {
            .format = color_format,
            .samples = VK_SAMPLE_COUNT_1_BIT,
            .loadOp = keeps_contents ? VK_ATTACHMENT_LOAD_OP_LOAD : VK_ATTACHMENT_LOAD_OP_CLEAR,
            .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
            .initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
            .finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
        },
        [1] = {
            .format = depth_format,
            .samples = VK_SAMPLE_COUNT_1_BIT,
            //.loadOp = keeps_contents ? VK_ATTACHMENT_LOAD_OP_LOAD : VK_ATTACHMENT_LOAD_OP_CLEAR,
            .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
            .storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
            .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
            .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
            .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
            .finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
        },
    };
    VkAttachmentReference render_pass_attachment_references[2] = {
        [0] = {
            .attachment = 0,
            .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
        },
        [1] = {
            .attachment = 1,
            .layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
        },
    };
    VkSubpassDescription render_pass_subpasses[1] = {
        [0] = {
            .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
            .colorAttachmentCount = 1,
            .pColorAttachments = &render_pass_attachment_references[0],
            .pDepthStencilAttachment = has_depth ? &render_pass_attachment_references[1] : NULL,
        },
    };
    VkRenderPassCreateInfo render_pass_info = {
        .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
        .attachmentCount = has_depth ? 2 : 1,
        .pAttachments = render_pass_attachments,
        .subpassCount = 1,
        .pSubpasses = render_pass_subpasses,
    };
    res = vkCreateRenderPass(dev->device, &render_pass_info, NULL, render_pass);
    vk_error_set_vkresult(&retval, res);
    return retval;
}
vk_error vk_create_offscreen_buffers(struct vk_physical_device *phy_dev, struct vk_device *dev, VkFormat format,
		struct vk_offscreen_buffers *offscreen_buffers, uint32_t offscreen_buffer_count, VkRenderPass *render_pass,
		enum vk_render_pass_load_op keeps_contents, enum vk_make_depth_buffer has_depth, bool linear)
{
	uint32_t successful = 0;
	vk_error retval = VK_ERROR_NONE;
	VkResult res;
	vk_error err;

	VkImageFormatProperties format_properties;
	res = vkGetPhysicalDeviceImageFormatProperties(phy_dev->physical_device, format, VK_IMAGE_TYPE_2D,
			VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT, 0, &format_properties);
	vk_error_sub_set_vkresult(&retval, res);
	if (res != VK_SUCCESS)
		return retval;

	for (uint32_t i = 0; i < offscreen_buffer_count; ++i)
	{
		offscreen_buffers[i].color = (struct vk_image){0};
		offscreen_buffers[i].depth = (struct vk_image){0};
		offscreen_buffers[i].framebuffer = NULL;

		/* Clamp the requested size to the largest extent the format supports. */
		if (format_properties.maxExtent.width < offscreen_buffers[i].surface_size.width)
			offscreen_buffers[i].surface_size.width = format_properties.maxExtent.width;
		if (format_properties.maxExtent.height < offscreen_buffers[i].surface_size.height)
			offscreen_buffers[i].surface_size.height = format_properties.maxExtent.height;
	}

	VkFormat depth_format = vk_get_supported_depth_stencil_format(phy_dev);

	retval = create_render_pass(dev, format, depth_format, render_pass, keeps_contents, has_depth);
	if (!vk_error_is_success(&retval))
		return retval;

	for (uint32_t i = 0; i < offscreen_buffer_count; ++i)
	{
		/* The color image is both a render target and sampleable, so a later pass can read from it. */
		offscreen_buffers[i].color = (struct vk_image){
			.format = format,
			.extent = offscreen_buffers[i].surface_size,
			.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
				| VK_IMAGE_USAGE_SAMPLED_BIT,
			.stage = VK_SHADER_STAGE_FRAGMENT_BIT,
			.make_view = true,
			.anisotropyEnable = true,
			.repeat_mode = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
			.mipmaps = false,
			.linear = linear,
		};

		err = vk_create_images(phy_dev, dev, &offscreen_buffers[i].color, 1);
		vk_error_sub_merge(&retval, &err);
		if (!vk_error_is_success(&err))
			continue;

		if (has_depth)
		{
			offscreen_buffers[i].depth = (struct vk_image){
				.format = depth_format,
				.extent = offscreen_buffers[i].surface_size,
				.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
				.make_view = true,
				.anisotropyEnable = true,
				.repeat_mode = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
				.mipmaps = false,
				.linear = false,
			};

			err = vk_create_images(phy_dev, dev, &offscreen_buffers[i].depth, 1);
			vk_error_sub_merge(&retval, &err);
			if (!vk_error_is_success(&err))
				continue;
		}

		VkImageView framebuffer_attachments[2] = {
			offscreen_buffers[i].color.view,
			offscreen_buffers[i].depth.view,
		};
		VkFramebufferCreateInfo framebuffer_info = {
			.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
			.renderPass = *render_pass,
			.attachmentCount = has_depth?2:1,
			.pAttachments = framebuffer_attachments,
			.width = offscreen_buffers[i].surface_size.width,
			.height = offscreen_buffers[i].surface_size.height,
			.layers = 1,
		};

		res = vkCreateFramebuffer(dev->device, &framebuffer_info, NULL, &offscreen_buffers[i].framebuffer);
		vk_error_sub_set_vkresult(&retval, res);
		if (res)
			continue;

		++successful;
	}

	vk_error_set_vkresult(&retval, successful == offscreen_buffer_count?VK_SUCCESS:VK_INCOMPLETE);
	return retval;
}
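/*
 * Illustrative usage sketch, not part of the original file: the function name, the
 * 1024x1024 extent and the VK_FORMAT_R8G8B8A8_UNORM choice are this example's
 * assumptions.  The point it shows is that vk_create_offscreen_buffers reads
 * surface_size from each element, so the caller must fill it in before the call.
 */
static vk_error example_make_offscreen_target(struct vk_physical_device *phy_dev, struct vk_device *dev,
		VkRenderPass *render_pass, struct vk_offscreen_buffers *buffers,
		enum vk_render_pass_load_op keeps_contents, enum vk_make_depth_buffer has_depth)
{
	*buffers = (struct vk_offscreen_buffers){
		.surface_size = { .width = 1024, .height = 1024 },
	};

	return vk_create_offscreen_buffers(phy_dev, dev, VK_FORMAT_R8G8B8A8_UNORM, buffers, 1,
			render_pass, keeps_contents, has_depth, false);
}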
vk_error vk_create_graphics_buffers(struct vk_physical_device *phy_dev, struct vk_device *dev, VkFormat format,
		struct vk_graphics_buffers *graphics_buffers, uint32_t graphics_buffer_count, VkRenderPass *render_pass,
		enum vk_render_pass_load_op keeps_contents, enum vk_make_depth_buffer has_depth)
{
	uint32_t successful = 0;
	vk_error retval = VK_ERROR_NONE;
	VkResult res;
	vk_error err;

	VkImageFormatProperties format_properties;
	res = vkGetPhysicalDeviceImageFormatProperties(phy_dev->physical_device, format, VK_IMAGE_TYPE_2D,
			VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT, 0, &format_properties);
	vk_error_sub_set_vkresult(&retval, res);
	if (res != VK_SUCCESS)
		return retval;

	for (uint32_t i = 0; i < graphics_buffer_count; ++i)
	{
		graphics_buffers[i].color_view = NULL;
		graphics_buffers[i].depth = (struct vk_image){0};
		graphics_buffers[i].framebuffer = NULL;

		/* Clamp the requested size to the largest extent the format supports. */
		if (format_properties.maxExtent.width < graphics_buffers[i].surface_size.width)
			graphics_buffers[i].surface_size.width = format_properties.maxExtent.width;
		if (format_properties.maxExtent.height < graphics_buffers[i].surface_size.height)
			graphics_buffers[i].surface_size.height = format_properties.maxExtent.height;
	}

	VkFormat depth_format = vk_get_supported_depth_stencil_format(phy_dev);

	retval = create_render_pass(dev, format, depth_format, render_pass, keeps_contents, has_depth);
	if (!vk_error_is_success(&retval))
		return retval;

	for (uint32_t i = 0; i < graphics_buffer_count; ++i)
	{
		/* The color images are owned by the swapchain; only a view over each of them is created here. */
		VkImageViewCreateInfo view_info = {
			.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
			.image = graphics_buffers[i].swapchain_image,
			.viewType = VK_IMAGE_VIEW_TYPE_2D,
			.format = format,
			.components = {VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A},
			.subresourceRange = {
				.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
				.baseMipLevel = 0,
				.levelCount = VK_REMAINING_MIP_LEVELS,
				.baseArrayLayer = 0,
				.layerCount = VK_REMAINING_ARRAY_LAYERS,
			},
		};
		res = vkCreateImageView(dev->device, &view_info, NULL, &graphics_buffers[i].color_view);
		vk_error_sub_set_vkresult(&retval, res);
		if (res)
			continue;

		if (has_depth)
		{
			graphics_buffers[i].depth = (struct vk_image){
				.format = depth_format,
				.extent = graphics_buffers[i].surface_size,
				.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
				.make_view = true,
				.multisample = false,
				.will_be_initialized = false,
				.anisotropyEnable = true,
				.repeat_mode = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
				.mipmaps = false,
				.linear = false,
			};

			err = vk_create_images(phy_dev, dev, &graphics_buffers[i].depth, 1);
			vk_error_sub_merge(&retval, &err);
			if (!vk_error_is_success(&err))
				continue;
		}

		VkImageView framebuffer_attachments[2] = {
			graphics_buffers[i].color_view,
			graphics_buffers[i].depth.view,
		};
		VkFramebufferCreateInfo framebuffer_info = {
			.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
			.renderPass = *render_pass,
			.attachmentCount = has_depth?2:1,
			.pAttachments = framebuffer_attachments,
			.width = graphics_buffers[i].surface_size.width,
			.height = graphics_buffers[i].surface_size.height,
			.layers = 1,
		};

		res = vkCreateFramebuffer(dev->device, &framebuffer_info, NULL, &graphics_buffers[i].framebuffer);
		vk_error_sub_set_vkresult(&retval, res);
		if (res)
			continue;

		++successful;
	}

	vk_error_set_vkresult(&retval, successful == graphics_buffer_count?VK_SUCCESS:VK_INCOMPLETE);
	return retval;
}
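/*
 * Illustrative sketch, not part of the original file: how graphics_buffers[] might
 * be populated before calling vk_create_graphics_buffers.  The color images come
 * from a swapchain, so they are queried with vkGetSwapchainImagesKHR rather than
 * created.  The function name, the fixed-size local array, the trimmed error
 * handling, and the assumption that surface_size is a VkExtent2D are all this
 * example's own.
 */
static uint32_t example_fill_swapchain_images(struct vk_device *dev, VkSwapchainKHR swapchain,
		struct vk_graphics_buffers *graphics_buffers, uint32_t max_count, VkExtent2D surface_size)
{
	uint32_t image_count = 0;
	vkGetSwapchainImagesKHR(dev->device, swapchain, &image_count, NULL);
	if (image_count > max_count)
		image_count = max_count;

	VkImage images[8];	/* assumes max_count <= 8 for this sketch */
	vkGetSwapchainImagesKHR(dev->device, swapchain, &image_count, images);

	for (uint32_t i = 0; i < image_count; ++i)
		graphics_buffers[i] = (struct vk_graphics_buffers){
			.surface_size = surface_size,
			.swapchain_image = images[i],
		};

	return image_count;
}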
void vk_free_offscreen_buffers(struct vk_device *dev, struct vk_offscreen_buffers *offscreen_buffers, uint32_t offscreen_buffer_count,
		VkRenderPass render_pass)
{
	vkDeviceWaitIdle(dev->device);

	for (uint32_t i = 0; i < offscreen_buffer_count; ++i)
	{
		vk_free_images(dev, &offscreen_buffers[i].color, 1);
		vk_free_images(dev, &offscreen_buffers[i].depth, 1);
		vkDestroyFramebuffer(dev->device, offscreen_buffers[i].framebuffer, NULL);
	}

	vkDestroyRenderPass(dev->device, render_pass, NULL);
}
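/*
 * Hypothetical counterpart sketch, not part of the original file: a cleanup for
 * vk_create_graphics_buffers along the lines of vk_free_offscreen_buffers above (if
 * the real counterpart exists elsewhere in this file, prefer it).  The swapchain
 * images themselves belong to the swapchain and must not be destroyed here; only the
 * color view, the depth image, the framebuffer and the render pass are freed.
 */
static void example_free_graphics_buffers(struct vk_device *dev, struct vk_graphics_buffers *graphics_buffers,
		uint32_t graphics_buffer_count, VkRenderPass render_pass)
{
	vkDeviceWaitIdle(dev->device);

	for (uint32_t i = 0; i < graphics_buffer_count; ++i)
	{
		vk_free_images(dev, &graphics_buffers[i].depth, 1);
		vkDestroyImageView(dev->device, graphics_buffers[i].color_view, NULL);
		vkDestroyFramebuffer(dev->device, graphics_buffers[i].framebuffer, NULL);
	}

	vkDestroyRenderPass(dev->device, render_pass, NULL);
}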