Platformer in OpenGL
Nie możesz wybrać więcej niż 25 tematów. Tematy muszą się zaczynać od litery lub cyfry, mogą zawierać myślniki ('-') i mogą mieć do 35 znaków.

2245 wierszy
87 KiB

5 lat temu
  1. /*
  2. * Copyright (c) 2015-2016 The Khronos Group Inc.
  3. * Copyright (c) 2015-2016 Valve Corporation
  4. * Copyright (c) 2015-2016 LunarG, Inc.
  5. *
  6. * Permission is hereby granted, free of charge, to any person obtaining a copy
  7. * of this software and/or associated documentation files (the "Materials"), to
  8. * deal in the Materials without restriction, including without limitation the
  9. * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
  10. * sell copies of the Materials, and to permit persons to whom the Materials are
  11. * furnished to do so, subject to the following conditions:
  12. *
  13. * The above copyright notice(s) and this permission notice shall be included in
  14. * all copies or substantial portions of the Materials.
  15. *
  16. * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  17. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  18. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
  19. *
  20. * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
  21. * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
  22. * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE
  23. * USE OR OTHER DEALINGS IN THE MATERIALS.
  24. *
  25. * Author: Chia-I Wu <olvaffe@gmail.com>
  26. * Author: Cody Northrop <cody@lunarg.com>
  27. * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
  28. * Author: Ian Elliott <ian@LunarG.com>
  29. * Author: Jon Ashburn <jon@lunarg.com>
  30. * Author: Piers Daniell <pdaniell@nvidia.com>
  31. */
  32. /*
  33. * Draw a textured triangle with depth testing. This is written against Intel
  34. * ICD. It does not do state transition nor object memory binding like it
  35. * should. It also does no error checking.
  36. */
  37. #ifndef _MSC_VER
  38. #define _ISOC11_SOURCE /* for aligned_alloc() */
  39. #endif
  40. #include <stdio.h>
  41. #include <stdlib.h>
  42. #include <string.h>
  43. #include <stdbool.h>
  44. #include <assert.h>
  45. #include <vulkan/vulkan.h>
  46. #include <GLFW/glfw3.h>
  47. #define DEMO_TEXTURE_COUNT 1
  48. #define VERTEX_BUFFER_BIND_ID 0
  49. #define APP_SHORT_NAME "vulkan"
  50. #define APP_LONG_NAME "The Vulkan Triangle Demo Program"
  51. #define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
  52. #if defined(NDEBUG) && defined(__GNUC__)
  53. #define U_ASSERT_ONLY __attribute__((unused))
  54. #else
  55. #define U_ASSERT_ONLY
  56. #endif
  57. #define ERR_EXIT(err_msg, err_class) \
  58. do { \
  59. printf(err_msg); \
  60. fflush(stdout); \
  61. exit(1); \
  62. } while (0)
  63. #define GET_INSTANCE_PROC_ADDR(inst, entrypoint) \
  64. { \
  65. demo->fp##entrypoint = \
  66. (PFN_vk##entrypoint)vkGetInstanceProcAddr(inst, "vk" #entrypoint); \
  67. if (demo->fp##entrypoint == NULL) { \
  68. ERR_EXIT("vkGetInstanceProcAddr failed to find vk" #entrypoint, \
  69. "vkGetInstanceProcAddr Failure"); \
  70. } \
  71. }
  72. #define GET_DEVICE_PROC_ADDR(dev, entrypoint) \
  73. { \
  74. demo->fp##entrypoint = \
  75. (PFN_vk##entrypoint)vkGetDeviceProcAddr(dev, "vk" #entrypoint); \
  76. if (demo->fp##entrypoint == NULL) { \
  77. ERR_EXIT("vkGetDeviceProcAddr failed to find vk" #entrypoint, \
  78. "vkGetDeviceProcAddr Failure"); \
  79. } \
  80. }
/*
 * Pre-compiled SPIR-V binary for the fragment shader (magic word 0x07230203
 * in the first four bytes).  Samples a combined image sampler "tex" at
 * varying "texcoord" and writes the result to "uFragColor".
 * NOTE(review): stored as char[]; vkCreateShaderModule expects pCode to be
 * uint32_t-aligned — presumably the consumer copies/aligns this before use;
 * verify at the vkCreateShaderModule call site.
 */
static const char fragShaderCode[] = {
    0x03, 0x02, 0x23, 0x07, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x08, 0x00,
    0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11, 0x00, 0x02, 0x00,
    0x01, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x06, 0x00, 0x01, 0x00, 0x00, 0x00,
    0x47, 0x4c, 0x53, 0x4c, 0x2e, 0x73, 0x74, 0x64, 0x2e, 0x34, 0x35, 0x30,
    0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x00, 0x00, 0x0f, 0x00, 0x07, 0x00, 0x04, 0x00, 0x00, 0x00,
    0x04, 0x00, 0x00, 0x00, 0x6d, 0x61, 0x69, 0x6e, 0x00, 0x00, 0x00, 0x00,
    0x09, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x10, 0x00, 0x03, 0x00,
    0x04, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x03, 0x00, 0x03, 0x00,
    0x02, 0x00, 0x00, 0x00, 0x90, 0x01, 0x00, 0x00, 0x04, 0x00, 0x09, 0x00,
    0x47, 0x4c, 0x5f, 0x41, 0x52, 0x42, 0x5f, 0x73, 0x65, 0x70, 0x61, 0x72,
    0x61, 0x74, 0x65, 0x5f, 0x73, 0x68, 0x61, 0x64, 0x65, 0x72, 0x5f, 0x6f,
    0x62, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x00, 0x00, 0x04, 0x00, 0x09, 0x00,
    0x47, 0x4c, 0x5f, 0x41, 0x52, 0x42, 0x5f, 0x73, 0x68, 0x61, 0x64, 0x69,
    0x6e, 0x67, 0x5f, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x5f,
    0x34, 0x32, 0x30, 0x70, 0x61, 0x63, 0x6b, 0x00, 0x05, 0x00, 0x04, 0x00,
    0x04, 0x00, 0x00, 0x00, 0x6d, 0x61, 0x69, 0x6e, 0x00, 0x00, 0x00, 0x00,
    0x05, 0x00, 0x05, 0x00, 0x09, 0x00, 0x00, 0x00, 0x75, 0x46, 0x72, 0x61,
    0x67, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x00, 0x00, 0x05, 0x00, 0x03, 0x00,
    0x0d, 0x00, 0x00, 0x00, 0x74, 0x65, 0x78, 0x00, 0x05, 0x00, 0x05, 0x00,
    0x11, 0x00, 0x00, 0x00, 0x74, 0x65, 0x78, 0x63, 0x6f, 0x6f, 0x72, 0x64,
    0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x09, 0x00, 0x00, 0x00,
    0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00,
    0x0d, 0x00, 0x00, 0x00, 0x22, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x47, 0x00, 0x04, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x21, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x11, 0x00, 0x00, 0x00,
    0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x13, 0x00, 0x02, 0x00,
    0x02, 0x00, 0x00, 0x00, 0x21, 0x00, 0x03, 0x00, 0x03, 0x00, 0x00, 0x00,
    0x02, 0x00, 0x00, 0x00, 0x16, 0x00, 0x03, 0x00, 0x06, 0x00, 0x00, 0x00,
    0x20, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00, 0x07, 0x00, 0x00, 0x00,
    0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00,
    0x08, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00,
    0x3b, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00,
    0x03, 0x00, 0x00, 0x00, 0x19, 0x00, 0x09, 0x00, 0x0a, 0x00, 0x00, 0x00,
    0x06, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x1b, 0x00, 0x03, 0x00, 0x0b, 0x00, 0x00, 0x00,
    0x0a, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x0c, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00,
    0x0c, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x17, 0x00, 0x04, 0x00, 0x0f, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00,
    0x02, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x10, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x00, 0x00, 0x0f, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00,
    0x10, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
    0x36, 0x00, 0x05, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xf8, 0x00, 0x02, 0x00,
    0x05, 0x00, 0x00, 0x00, 0x3d, 0x00, 0x04, 0x00, 0x0b, 0x00, 0x00, 0x00,
    0x0e, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x3d, 0x00, 0x04, 0x00,
    0x0f, 0x00, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00,
    0x57, 0x00, 0x05, 0x00, 0x07, 0x00, 0x00, 0x00, 0x13, 0x00, 0x00, 0x00,
    0x0e, 0x00, 0x00, 0x00, 0x12, 0x00, 0x00, 0x00, 0x3e, 0x00, 0x03, 0x00,
    0x09, 0x00, 0x00, 0x00, 0x13, 0x00, 0x00, 0x00, 0xfd, 0x00, 0x01, 0x00,
    0x38, 0x00, 0x01, 0x00
};
/*
 * Pre-compiled SPIR-V binary for the vertex shader.  Passes vertex input
 * "attr" through to varying "texcoord" and writes "pos" to gl_Position
 * (gl_PerVertex block).  Same char[]-vs-uint32_t alignment caveat as
 * fragShaderCode above — TODO confirm alignment at module creation.
 */
static const char vertShaderCode[] = {
    0x03, 0x02, 0x23, 0x07, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x08, 0x00,
    0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11, 0x00, 0x02, 0x00,
    0x01, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x06, 0x00, 0x01, 0x00, 0x00, 0x00,
    0x47, 0x4c, 0x53, 0x4c, 0x2e, 0x73, 0x74, 0x64, 0x2e, 0x34, 0x35, 0x30,
    0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x00, 0x00, 0x0f, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x04, 0x00, 0x00, 0x00, 0x6d, 0x61, 0x69, 0x6e, 0x00, 0x00, 0x00, 0x00,
    0x09, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x13, 0x00, 0x00, 0x00,
    0x17, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x1d, 0x00, 0x00, 0x00,
    0x03, 0x00, 0x03, 0x00, 0x02, 0x00, 0x00, 0x00, 0x90, 0x01, 0x00, 0x00,
    0x04, 0x00, 0x09, 0x00, 0x47, 0x4c, 0x5f, 0x41, 0x52, 0x42, 0x5f, 0x73,
    0x65, 0x70, 0x61, 0x72, 0x61, 0x74, 0x65, 0x5f, 0x73, 0x68, 0x61, 0x64,
    0x65, 0x72, 0x5f, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x00, 0x00,
    0x04, 0x00, 0x09, 0x00, 0x47, 0x4c, 0x5f, 0x41, 0x52, 0x42, 0x5f, 0x73,
    0x68, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x6c, 0x61, 0x6e, 0x67, 0x75,
    0x61, 0x67, 0x65, 0x5f, 0x34, 0x32, 0x30, 0x70, 0x61, 0x63, 0x6b, 0x00,
    0x05, 0x00, 0x04, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6d, 0x61, 0x69, 0x6e,
    0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x05, 0x00, 0x09, 0x00, 0x00, 0x00,
    0x74, 0x65, 0x78, 0x63, 0x6f, 0x6f, 0x72, 0x64, 0x00, 0x00, 0x00, 0x00,
    0x05, 0x00, 0x04, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x61, 0x74, 0x74, 0x72,
    0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, 0x11, 0x00, 0x00, 0x00,
    0x67, 0x6c, 0x5f, 0x50, 0x65, 0x72, 0x56, 0x65, 0x72, 0x74, 0x65, 0x78,
    0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x06, 0x00, 0x11, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x67, 0x6c, 0x5f, 0x50, 0x6f, 0x73, 0x69, 0x74,
    0x69, 0x6f, 0x6e, 0x00, 0x06, 0x00, 0x07, 0x00, 0x11, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x00, 0x00, 0x67, 0x6c, 0x5f, 0x50, 0x6f, 0x69, 0x6e, 0x74,
    0x53, 0x69, 0x7a, 0x65, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x07, 0x00,
    0x11, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x67, 0x6c, 0x5f, 0x43,
    0x6c, 0x69, 0x70, 0x44, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x00,
    0x05, 0x00, 0x03, 0x00, 0x13, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x05, 0x00, 0x03, 0x00, 0x17, 0x00, 0x00, 0x00, 0x70, 0x6f, 0x73, 0x00,
    0x05, 0x00, 0x05, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x67, 0x6c, 0x5f, 0x56,
    0x65, 0x72, 0x74, 0x65, 0x78, 0x49, 0x44, 0x00, 0x05, 0x00, 0x06, 0x00,
    0x1d, 0x00, 0x00, 0x00, 0x67, 0x6c, 0x5f, 0x49, 0x6e, 0x73, 0x74, 0x61,
    0x6e, 0x63, 0x65, 0x49, 0x44, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00,
    0x09, 0x00, 0x00, 0x00, 0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x47, 0x00, 0x04, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x1e, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x05, 0x00, 0x11, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x48, 0x00, 0x05, 0x00, 0x11, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
    0x0b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x05, 0x00,
    0x11, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00,
    0x03, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00, 0x11, 0x00, 0x00, 0x00,
    0x02, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x17, 0x00, 0x00, 0x00,
    0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00,
    0x1c, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00,
    0x47, 0x00, 0x04, 0x00, 0x1d, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00,
    0x06, 0x00, 0x00, 0x00, 0x13, 0x00, 0x02, 0x00, 0x02, 0x00, 0x00, 0x00,
    0x21, 0x00, 0x03, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
    0x16, 0x00, 0x03, 0x00, 0x06, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00,
    0x17, 0x00, 0x04, 0x00, 0x07, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00,
    0x02, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00,
    0x03, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00,
    0x08, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
    0x20, 0x00, 0x04, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
    0x07, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00, 0x0a, 0x00, 0x00, 0x00,
    0x0b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00,
    0x0d, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
    0x15, 0x00, 0x04, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x2b, 0x00, 0x04, 0x00, 0x0e, 0x00, 0x00, 0x00,
    0x0f, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x04, 0x00,
    0x10, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0f, 0x00, 0x00, 0x00,
    0x1e, 0x00, 0x05, 0x00, 0x11, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00,
    0x06, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00,
    0x12, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00,
    0x3b, 0x00, 0x04, 0x00, 0x12, 0x00, 0x00, 0x00, 0x13, 0x00, 0x00, 0x00,
    0x03, 0x00, 0x00, 0x00, 0x15, 0x00, 0x04, 0x00, 0x14, 0x00, 0x00, 0x00,
    0x20, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x2b, 0x00, 0x04, 0x00,
    0x14, 0x00, 0x00, 0x00, 0x15, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x20, 0x00, 0x04, 0x00, 0x16, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
    0x0d, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00, 0x16, 0x00, 0x00, 0x00,
    0x17, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00,
    0x19, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00,
    0x20, 0x00, 0x04, 0x00, 0x1b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
    0x14, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00, 0x1b, 0x00, 0x00, 0x00,
    0x1c, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x3b, 0x00, 0x04, 0x00,
    0x1b, 0x00, 0x00, 0x00, 0x1d, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
    0x36, 0x00, 0x05, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xf8, 0x00, 0x02, 0x00,
    0x05, 0x00, 0x00, 0x00, 0x3d, 0x00, 0x04, 0x00, 0x07, 0x00, 0x00, 0x00,
    0x0c, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x3e, 0x00, 0x03, 0x00,
    0x09, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x3d, 0x00, 0x04, 0x00,
    0x0d, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x17, 0x00, 0x00, 0x00,
    0x41, 0x00, 0x05, 0x00, 0x19, 0x00, 0x00, 0x00, 0x1a, 0x00, 0x00, 0x00,
    0x13, 0x00, 0x00, 0x00, 0x15, 0x00, 0x00, 0x00, 0x3e, 0x00, 0x03, 0x00,
    0x1a, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0xfd, 0x00, 0x01, 0x00,
    0x38, 0x00, 0x01, 0x00
};
/* Bundle of Vulkan objects backing one sampled texture. */
struct texture_object {
    VkSampler sampler;        /* sampler used when binding the texture */
    VkImage image;            /* the texture image itself */
    VkImageLayout imageLayout;/* layout the image is expected to be in */
    VkDeviceMemory mem;       /* device memory bound to `image` */
    VkImageView view;         /* view used by the descriptor set */
    int32_t tex_width, tex_height; /* texel dimensions */
};
  233. VKAPI_ATTR VkBool32 VKAPI_CALL
  234. dbgFunc(VkFlags msgFlags, VkDebugReportObjectTypeEXT objType,
  235. uint64_t srcObject, size_t location, int32_t msgCode,
  236. const char *pLayerPrefix, const char *pMsg, void *pUserData) {
  237. char *message = (char *)malloc(strlen(pMsg) + 100);
  238. assert(message);
  239. if (msgFlags & VK_DEBUG_REPORT_ERROR_BIT_EXT) {
  240. sprintf(message, "ERROR: [%s] Code %d : %s", pLayerPrefix, msgCode,
  241. pMsg);
  242. } else if (msgFlags & VK_DEBUG_REPORT_WARNING_BIT_EXT) {
  243. sprintf(message, "WARNING: [%s] Code %d : %s", pLayerPrefix, msgCode,
  244. pMsg);
  245. } else {
  246. return false;
  247. }
  248. printf("%s\n", message);
  249. fflush(stdout);
  250. free(message);
  251. /*
  252. * false indicates that layer should not bail-out of an
  253. * API call that had validation failures. This may mean that the
  254. * app dies inside the driver due to invalid parameter(s).
  255. * That's what would happen without validation layers, so we'll
  256. * keep that behavior here.
  257. */
  258. return false;
  259. }
/* Per-swapchain-image state: the presentable image, the command buffer
 * that renders into it, and the view used as a framebuffer attachment. */
typedef struct _SwapchainBuffers {
    VkImage image;        /* owned by the swapchain, not destroyed by us */
    VkCommandBuffer cmd;  /* draw command buffer targeting this image */
    VkImageView view;     /* color-attachment view of `image` */
} SwapchainBuffers;
/*
 * All mutable state of the demo application: window/surface, Vulkan
 * instance/device handles, swapchain resources, pipeline objects and
 * per-frame bookkeeping.  A single instance is threaded through every
 * demo_* function.
 */
struct demo {
    GLFWwindow* window;            /* GLFW window hosting the surface */
    VkSurfaceKHR surface;          /* presentation surface for `window` */
    bool use_staging_buffer;       /* upload textures via a staging image */
    VkAllocationCallbacks allocator;
    VkInstance inst;
    VkPhysicalDevice gpu;
    VkDevice device;
    VkQueue queue;                 /* graphics queue (also used to present) */
    VkPhysicalDeviceProperties gpu_props;
    VkQueueFamilyProperties *queue_props; /* array of queue_count entries */
    uint32_t graphics_queue_node_index;
    uint32_t enabled_extension_count;
    uint32_t enabled_layer_count;
    const char *extension_names[64];
    char *device_validation_layers[64];
    int width, height;             /* current framebuffer size in pixels */
    VkFormat format;               /* swapchain color format */
    VkColorSpaceKHR color_space;
    /* WSI entry points resolved at runtime via vkGet*ProcAddr: */
    PFN_vkGetPhysicalDeviceSurfaceSupportKHR
        fpGetPhysicalDeviceSurfaceSupportKHR;
    PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR
        fpGetPhysicalDeviceSurfaceCapabilitiesKHR;
    PFN_vkGetPhysicalDeviceSurfaceFormatsKHR
        fpGetPhysicalDeviceSurfaceFormatsKHR;
    PFN_vkGetPhysicalDeviceSurfacePresentModesKHR
        fpGetPhysicalDeviceSurfacePresentModesKHR;
    PFN_vkCreateSwapchainKHR fpCreateSwapchainKHR;
    PFN_vkDestroySwapchainKHR fpDestroySwapchainKHR;
    PFN_vkGetSwapchainImagesKHR fpGetSwapchainImagesKHR;
    PFN_vkAcquireNextImageKHR fpAcquireNextImageKHR;
    PFN_vkQueuePresentKHR fpQueuePresentKHR;
    uint32_t swapchainImageCount;  /* number of entries in `buffers` */
    VkSwapchainKHR swapchain;
    SwapchainBuffers *buffers;     /* per-swapchain-image resources */
    VkCommandPool cmd_pool;
    /* Depth attachment shared by all framebuffers: */
    struct {
        VkFormat format;
        VkImage image;
        VkDeviceMemory mem;
        VkImageView view;
    } depth;
    struct texture_object textures[DEMO_TEXTURE_COUNT];
    /* Vertex buffer plus its input-state description: */
    struct {
        VkBuffer buf;
        VkDeviceMemory mem;
        VkPipelineVertexInputStateCreateInfo vi;
        VkVertexInputBindingDescription vi_bindings[1];
        VkVertexInputAttributeDescription vi_attrs[2];
    } vertices;
    VkCommandBuffer setup_cmd; // Command Buffer for initialization commands
    VkCommandBuffer draw_cmd;  // Command Buffer for drawing commands
    VkPipelineLayout pipeline_layout;
    VkDescriptorSetLayout desc_layout;
    VkPipelineCache pipelineCache;
    VkRenderPass render_pass;
    VkPipeline pipeline;
    VkShaderModule vert_shader_module;
    VkShaderModule frag_shader_module;
    VkDescriptorPool desc_pool;
    VkDescriptorSet desc_set;
    VkFramebuffer *framebuffers;   /* one per swapchain image */
    VkPhysicalDeviceMemoryProperties memory_properties;
    bool validate;                 /* validation layers requested */
    PFN_vkCreateDebugReportCallbackEXT CreateDebugReportCallback;
    PFN_vkDestroyDebugReportCallbackEXT DestroyDebugReportCallback;
    VkDebugReportCallbackEXT msg_callback;
    float depthStencil;            /* clear value for the depth attachment */
    float depthIncrement;          /* per-frame delta applied to depthStencil */
    uint32_t current_buffer;       /* swapchain image index being rendered */
    uint32_t queue_count;          /* entries in queue_props */
};
  337. // Forward declaration:
  338. static void demo_resize(struct demo *demo);
  339. static bool memory_type_from_properties(struct demo *demo, uint32_t typeBits,
  340. VkFlags requirements_mask,
  341. uint32_t *typeIndex) {
  342. uint32_t i;
  343. // Search memtypes to find first index with those properties
  344. for (i = 0; i < 32; i++) {
  345. if ((typeBits & 1) == 1) {
  346. // Type is available, does it match user properties?
  347. if ((demo->memory_properties.memoryTypes[i].propertyFlags &
  348. requirements_mask) == requirements_mask) {
  349. *typeIndex = i;
  350. return true;
  351. }
  352. }
  353. typeBits >>= 1;
  354. }
  355. // No memory types matched, return failure
  356. return false;
  357. }
  358. static void demo_flush_init_cmd(struct demo *demo) {
  359. VkResult U_ASSERT_ONLY err;
  360. if (demo->setup_cmd == VK_NULL_HANDLE)
  361. return;
  362. err = vkEndCommandBuffer(demo->setup_cmd);
  363. assert(!err);
  364. const VkCommandBuffer cmd_bufs[] = {demo->setup_cmd};
  365. VkFence nullFence = {VK_NULL_HANDLE};
  366. VkSubmitInfo submit_info = {.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
  367. .pNext = NULL,
  368. .waitSemaphoreCount = 0,
  369. .pWaitSemaphores = NULL,
  370. .pWaitDstStageMask = NULL,
  371. .commandBufferCount = 1,
  372. .pCommandBuffers = cmd_bufs,
  373. .signalSemaphoreCount = 0,
  374. .pSignalSemaphores = NULL};
  375. err = vkQueueSubmit(demo->queue, 1, &submit_info, nullFence);
  376. assert(!err);
  377. err = vkQueueWaitIdle(demo->queue);
  378. assert(!err);
  379. vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, cmd_bufs);
  380. demo->setup_cmd = VK_NULL_HANDLE;
  381. }
  382. static void demo_set_image_layout(struct demo *demo, VkImage image,
  383. VkImageAspectFlags aspectMask,
  384. VkImageLayout old_image_layout,
  385. VkImageLayout new_image_layout) {
  386. VkResult U_ASSERT_ONLY err;
  387. if (demo->setup_cmd == VK_NULL_HANDLE) {
  388. const VkCommandBufferAllocateInfo cmd = {
  389. .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
  390. .pNext = NULL,
  391. .commandPool = demo->cmd_pool,
  392. .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
  393. .commandBufferCount = 1,
  394. };
  395. err = vkAllocateCommandBuffers(demo->device, &cmd, &demo->setup_cmd);
  396. assert(!err);
  397. VkCommandBufferInheritanceInfo cmd_buf_hinfo = {
  398. .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
  399. .pNext = NULL,
  400. .renderPass = VK_NULL_HANDLE,
  401. .subpass = 0,
  402. .framebuffer = VK_NULL_HANDLE,
  403. .occlusionQueryEnable = VK_FALSE,
  404. .queryFlags = 0,
  405. .pipelineStatistics = 0,
  406. };
  407. VkCommandBufferBeginInfo cmd_buf_info = {
  408. .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
  409. .pNext = NULL,
  410. .flags = 0,
  411. .pInheritanceInfo = &cmd_buf_hinfo,
  412. };
  413. err = vkBeginCommandBuffer(demo->setup_cmd, &cmd_buf_info);
  414. assert(!err);
  415. }
  416. VkImageMemoryBarrier image_memory_barrier = {
  417. .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
  418. .pNext = NULL,
  419. .srcAccessMask = 0,
  420. .dstAccessMask = 0,
  421. .oldLayout = old_image_layout,
  422. .newLayout = new_image_layout,
  423. .image = image,
  424. .subresourceRange = {aspectMask, 0, 1, 0, 1}};
  425. if (new_image_layout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
  426. /* Make sure anything that was copying from this image has completed */
  427. image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
  428. }
  429. if (new_image_layout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
  430. image_memory_barrier.dstAccessMask =
  431. VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
  432. }
  433. if (new_image_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL) {
  434. image_memory_barrier.dstAccessMask =
  435. VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
  436. }
  437. if (new_image_layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
  438. /* Make sure any Copy or CPU writes to image are flushed */
  439. image_memory_barrier.dstAccessMask =
  440. VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
  441. }
  442. VkImageMemoryBarrier *pmemory_barrier = &image_memory_barrier;
  443. VkPipelineStageFlags src_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
  444. VkPipelineStageFlags dest_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
  445. vkCmdPipelineBarrier(demo->setup_cmd, src_stages, dest_stages, 0, 0, NULL,
  446. 0, NULL, 1, pmemory_barrier);
  447. }
/*
 * Re-record demo->draw_cmd for the current swapchain image: begin the
 * render pass (clearing color + depth), bind pipeline/descriptors, set
 * dynamic viewport/scissor, draw one triangle, then transition the image
 * to PRESENT_SRC_KHR with a barrier.  Recording order is significant.
 */
static void demo_draw_build_cmd(struct demo *demo) {
    const VkCommandBufferInheritanceInfo cmd_buf_hinfo = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
        .pNext = NULL,
        .renderPass = VK_NULL_HANDLE,
        .subpass = 0,
        .framebuffer = VK_NULL_HANDLE,
        .occlusionQueryEnable = VK_FALSE,
        .queryFlags = 0,
        .pipelineStatistics = 0,
    };
    const VkCommandBufferBeginInfo cmd_buf_info = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .pNext = NULL,
        .flags = 0,
        .pInheritanceInfo = &cmd_buf_hinfo,
    };
    /* Clear values match attachment order: [0] color, [1] depth/stencil.
     * NOTE(review): alpha of 0.2f looks unintentional for an opaque
     * background — confirm against the compositeAlpha setting. */
    const VkClearValue clear_values[2] = {
        [0] = {.color.float32 = {0.2f, 0.2f, 0.2f, 0.2f}},
        [1] = {.depthStencil = {demo->depthStencil, 0}},
    };
    const VkRenderPassBeginInfo rp_begin = {
        .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
        .pNext = NULL,
        .renderPass = demo->render_pass,
        .framebuffer = demo->framebuffers[demo->current_buffer],
        .renderArea.offset.x = 0,
        .renderArea.offset.y = 0,
        .renderArea.extent.width = demo->width,
        .renderArea.extent.height = demo->height,
        .clearValueCount = 2,
        .pClearValues = clear_values,
    };
    VkResult U_ASSERT_ONLY err;

    err = vkBeginCommandBuffer(demo->draw_cmd, &cmd_buf_info);
    assert(!err);

    vkCmdBeginRenderPass(demo->draw_cmd, &rp_begin, VK_SUBPASS_CONTENTS_INLINE);
    vkCmdBindPipeline(demo->draw_cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
                      demo->pipeline);
    vkCmdBindDescriptorSets(demo->draw_cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
                            demo->pipeline_layout, 0, 1, &demo->desc_set, 0,
                            NULL);

    /* Viewport and scissor are dynamic state — set per recording. */
    VkViewport viewport;
    memset(&viewport, 0, sizeof(viewport));
    viewport.height = (float)demo->height;
    viewport.width = (float)demo->width;
    viewport.minDepth = (float)0.0f;
    viewport.maxDepth = (float)1.0f;
    vkCmdSetViewport(demo->draw_cmd, 0, 1, &viewport);

    VkRect2D scissor;
    memset(&scissor, 0, sizeof(scissor));
    scissor.extent.width = demo->width;
    scissor.extent.height = demo->height;
    scissor.offset.x = 0;
    scissor.offset.y = 0;
    vkCmdSetScissor(demo->draw_cmd, 0, 1, &scissor);

    VkDeviceSize offsets[1] = {0};
    vkCmdBindVertexBuffers(demo->draw_cmd, VERTEX_BUFFER_BIND_ID, 1,
                           &demo->vertices.buf, offsets);

    /* One triangle: 3 vertices, 1 instance. */
    vkCmdDraw(demo->draw_cmd, 3, 1, 0, 0);
    vkCmdEndRenderPass(demo->draw_cmd);

    /* Transition the rendered image to the layout required by the
     * presentation engine before vkQueuePresentKHR. */
    VkImageMemoryBarrier prePresentBarrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
        .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT,
        .oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
        .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
        .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}};

    prePresentBarrier.image = demo->buffers[demo->current_buffer].image;
    VkImageMemoryBarrier *pmemory_barrier = &prePresentBarrier;
    vkCmdPipelineBarrier(demo->draw_cmd, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                         VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, NULL, 0,
                         NULL, 1, pmemory_barrier);

    err = vkEndCommandBuffer(demo->draw_cmd);
    assert(!err);
}
/*
 * Render and present one frame:
 *   1. create a semaphore and acquire the next swapchain image,
 *   2. transition that image back to COLOR_ATTACHMENT_OPTIMAL,
 *   3. re-record and submit the draw command buffer (waiting on the
 *      acquire semaphore),
 *   4. present, then wait for the queue to go idle.
 * On VK_ERROR_OUT_OF_DATE_KHR the swapchain is recreated and the frame is
 * retried recursively.
 */
static void demo_draw(struct demo *demo) {
    VkResult U_ASSERT_ONLY err;
    VkSemaphore presentCompleteSemaphore;
    VkSemaphoreCreateInfo presentCompleteSemaphoreCreateInfo = {
        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
        .pNext = NULL,
        .flags = 0,
    };

    err = vkCreateSemaphore(demo->device, &presentCompleteSemaphoreCreateInfo,
                            NULL, &presentCompleteSemaphore);
    assert(!err);

    // Get the index of the next available swapchain image:
    err = demo->fpAcquireNextImageKHR(demo->device, demo->swapchain, UINT64_MAX,
                                      presentCompleteSemaphore,
                                      (VkFence)0, // TODO: Show use of fence
                                      &demo->current_buffer);
    if (err == VK_ERROR_OUT_OF_DATE_KHR) {
        // demo->swapchain is out of date (e.g. the window was resized) and
        // must be recreated:
        demo_resize(demo);
        demo_draw(demo); /* recurse: draw the frame with the new swapchain */
        vkDestroySemaphore(demo->device, presentCompleteSemaphore, NULL);
        return;
    } else if (err == VK_SUBOPTIMAL_KHR) {
        // demo->swapchain is not as optimal as it could be, but the platform's
        // presentation engine will still present the image correctly.
    } else {
        assert(!err);
    }

    // Assume the command buffer has been run on current_buffer before so
    // we need to set the image layout back to COLOR_ATTACHMENT_OPTIMAL
    demo_set_image_layout(demo, demo->buffers[demo->current_buffer].image,
                          VK_IMAGE_ASPECT_COLOR_BIT,
                          VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
                          VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
    demo_flush_init_cmd(demo);

    // Wait for the present complete semaphore to be signaled to ensure
    // that the image won't be rendered to until the presentation
    // engine has fully released ownership to the application, and it is
    // okay to render to the image.

    // FIXME/TODO: DEAL WITH VK_IMAGE_LAYOUT_PRESENT_SRC_KHR
    demo_draw_build_cmd(demo);
    VkFence nullFence = VK_NULL_HANDLE;
    /* The semaphore wait gates the whole submission (BOTTOM_OF_PIPE). */
    VkPipelineStageFlags pipe_stage_flags =
        VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
    VkSubmitInfo submit_info = {.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
                                .pNext = NULL,
                                .waitSemaphoreCount = 1,
                                .pWaitSemaphores = &presentCompleteSemaphore,
                                .pWaitDstStageMask = &pipe_stage_flags,
                                .commandBufferCount = 1,
                                .pCommandBuffers = &demo->draw_cmd,
                                .signalSemaphoreCount = 0,
                                .pSignalSemaphores = NULL};

    err = vkQueueSubmit(demo->queue, 1, &submit_info, nullFence);
    assert(!err);

    VkPresentInfoKHR present = {
        .sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
        .pNext = NULL,
        .swapchainCount = 1,
        .pSwapchains = &demo->swapchain,
        .pImageIndices = &demo->current_buffer,
    };

    // TBD/TODO: SHOULD THE "present" PARAMETER BE "const" IN THE HEADER?
    err = demo->fpQueuePresentKHR(demo->queue, &present);
    if (err == VK_ERROR_OUT_OF_DATE_KHR) {
        // demo->swapchain is out of date (e.g. the window was resized) and
        // must be recreated:
        demo_resize(demo);
    } else if (err == VK_SUBOPTIMAL_KHR) {
        // demo->swapchain is not as optimal as it could be, but the platform's
        // presentation engine will still present the image correctly.
    } else {
        assert(!err);
    }

    /* Brute-force frame pacing: wait for the GPU to finish so the
     * semaphore can be safely destroyed and reused logic stays simple. */
    err = vkQueueWaitIdle(demo->queue);
    assert(err == VK_SUCCESS);

    vkDestroySemaphore(demo->device, presentCompleteSemaphore, NULL);
}
  606. static void demo_prepare_buffers(struct demo *demo) {
  607. VkResult U_ASSERT_ONLY err;
  608. VkSwapchainKHR oldSwapchain = demo->swapchain;
  609. // Check the surface capabilities and formats
  610. VkSurfaceCapabilitiesKHR surfCapabilities;
  611. err = demo->fpGetPhysicalDeviceSurfaceCapabilitiesKHR(
  612. demo->gpu, demo->surface, &surfCapabilities);
  613. assert(!err);
  614. uint32_t presentModeCount;
  615. err = demo->fpGetPhysicalDeviceSurfacePresentModesKHR(
  616. demo->gpu, demo->surface, &presentModeCount, NULL);
  617. assert(!err);
  618. VkPresentModeKHR *presentModes =
  619. (VkPresentModeKHR *)malloc(presentModeCount * sizeof(VkPresentModeKHR));
  620. assert(presentModes);
  621. err = demo->fpGetPhysicalDeviceSurfacePresentModesKHR(
  622. demo->gpu, demo->surface, &presentModeCount, presentModes);
  623. assert(!err);
  624. VkExtent2D swapchainExtent;
  625. // width and height are either both -1, or both not -1.
  626. if (surfCapabilities.currentExtent.width == (uint32_t)-1) {
  627. // If the surface size is undefined, the size is set to
  628. // the size of the images requested.
  629. swapchainExtent.width = demo->width;
  630. swapchainExtent.height = demo->height;
  631. } else {
  632. // If the surface size is defined, the swap chain size must match
  633. swapchainExtent = surfCapabilities.currentExtent;
  634. demo->width = surfCapabilities.currentExtent.width;
  635. demo->height = surfCapabilities.currentExtent.height;
  636. }
  637. VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;
  638. // Determine the number of VkImage's to use in the swap chain (we desire to
  639. // own only 1 image at a time, besides the images being displayed and
  640. // queued for display):
  641. uint32_t desiredNumberOfSwapchainImages =
  642. surfCapabilities.minImageCount + 1;
  643. if ((surfCapabilities.maxImageCount > 0) &&
  644. (desiredNumberOfSwapchainImages > surfCapabilities.maxImageCount)) {
  645. // Application must settle for fewer images than desired:
  646. desiredNumberOfSwapchainImages = surfCapabilities.maxImageCount;
  647. }
  648. VkSurfaceTransformFlagsKHR preTransform;
  649. if (surfCapabilities.supportedTransforms &
  650. VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
  651. preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
  652. } else {
  653. preTransform = surfCapabilities.currentTransform;
  654. }
  655. const VkSwapchainCreateInfoKHR swapchain = {
  656. .sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
  657. .pNext = NULL,
  658. .surface = demo->surface,
  659. .minImageCount = desiredNumberOfSwapchainImages,
  660. .imageFormat = demo->format,
  661. .imageColorSpace = demo->color_space,
  662. .imageExtent =
  663. {
  664. .width = swapchainExtent.width, .height = swapchainExtent.height,
  665. },
  666. .imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
  667. .preTransform = preTransform,
  668. .compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
  669. .imageArrayLayers = 1,
  670. .imageSharingMode = VK_SHARING_MODE_EXCLUSIVE,
  671. .queueFamilyIndexCount = 0,
  672. .pQueueFamilyIndices = NULL,
  673. .presentMode = swapchainPresentMode,
  674. .oldSwapchain = oldSwapchain,
  675. .clipped = true,
  676. };
  677. uint32_t i;
  678. err = demo->fpCreateSwapchainKHR(demo->device, &swapchain, NULL,
  679. &demo->swapchain);
  680. assert(!err);
  681. // If we just re-created an existing swapchain, we should destroy the old
  682. // swapchain at this point.
  683. // Note: destroying the swapchain also cleans up all its associated
  684. // presentable images once the platform is done with them.
  685. if (oldSwapchain != VK_NULL_HANDLE) {
  686. demo->fpDestroySwapchainKHR(demo->device, oldSwapchain, NULL);
  687. }
  688. err = demo->fpGetSwapchainImagesKHR(demo->device, demo->swapchain,
  689. &demo->swapchainImageCount, NULL);
  690. assert(!err);
  691. VkImage *swapchainImages =
  692. (VkImage *)malloc(demo->swapchainImageCount * sizeof(VkImage));
  693. assert(swapchainImages);
  694. err = demo->fpGetSwapchainImagesKHR(demo->device, demo->swapchain,
  695. &demo->swapchainImageCount,
  696. swapchainImages);
  697. assert(!err);
  698. demo->buffers = (SwapchainBuffers *)malloc(sizeof(SwapchainBuffers) *
  699. demo->swapchainImageCount);
  700. assert(demo->buffers);
  701. for (i = 0; i < demo->swapchainImageCount; i++) {
  702. VkImageViewCreateInfo color_attachment_view = {
  703. .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
  704. .pNext = NULL,
  705. .format = demo->format,
  706. .components =
  707. {
  708. .r = VK_COMPONENT_SWIZZLE_R,
  709. .g = VK_COMPONENT_SWIZZLE_G,
  710. .b = VK_COMPONENT_SWIZZLE_B,
  711. .a = VK_COMPONENT_SWIZZLE_A,
  712. },
  713. .subresourceRange = {.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
  714. .baseMipLevel = 0,
  715. .levelCount = 1,
  716. .baseArrayLayer = 0,
  717. .layerCount = 1},
  718. .viewType = VK_IMAGE_VIEW_TYPE_2D,
  719. .flags = 0,
  720. };
  721. demo->buffers[i].image = swapchainImages[i];
  722. // Render loop will expect image to have been used before and in
  723. // VK_IMAGE_LAYOUT_PRESENT_SRC_KHR
  724. // layout and will change to COLOR_ATTACHMENT_OPTIMAL, so init the image
  725. // to that state
  726. demo_set_image_layout(
  727. demo, demo->buffers[i].image, VK_IMAGE_ASPECT_COLOR_BIT,
  728. VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
  729. color_attachment_view.image = demo->buffers[i].image;
  730. err = vkCreateImageView(demo->device, &color_attachment_view, NULL,
  731. &demo->buffers[i].view);
  732. assert(!err);
  733. }
  734. demo->current_buffer = 0;
  735. if (NULL != presentModes) {
  736. free(presentModes);
  737. }
  738. }
/*
 * Create the D16 depth attachment: image, backing device memory, and image
 * view, sized to the current demo->width/height.  The image is transitioned
 * to DEPTH_STENCIL_ATTACHMENT_OPTIMAL so the render pass can use it without
 * a per-frame layout change.
 */
static void demo_prepare_depth(struct demo *demo) {
    const VkFormat depth_format = VK_FORMAT_D16_UNORM;
    const VkImageCreateInfo image = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
        .pNext = NULL,
        .imageType = VK_IMAGE_TYPE_2D,
        .format = depth_format,
        .extent = {demo->width, demo->height, 1},
        .mipLevels = 1,
        .arrayLayers = 1,
        .samples = VK_SAMPLE_COUNT_1_BIT,
        .tiling = VK_IMAGE_TILING_OPTIMAL,
        .usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
        .flags = 0,
    };
    /* allocationSize/memoryTypeIndex are placeholders, filled in after the
     * image's memory requirements are queried below. */
    VkMemoryAllocateInfo mem_alloc = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = NULL,
        .allocationSize = 0,
        .memoryTypeIndex = 0,
    };
    VkImageViewCreateInfo view = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
        .pNext = NULL,
        .image = VK_NULL_HANDLE, /* patched once the image exists */
        .format = depth_format,
        .subresourceRange = {.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT,
                             .baseMipLevel = 0,
                             .levelCount = 1,
                             .baseArrayLayer = 0,
                             .layerCount = 1},
        .flags = 0,
        .viewType = VK_IMAGE_VIEW_TYPE_2D,
    };
    VkMemoryRequirements mem_reqs;
    VkResult U_ASSERT_ONLY err;
    bool U_ASSERT_ONLY pass;

    demo->depth.format = depth_format;

    /* create image */
    err = vkCreateImage(demo->device, &image, NULL, &demo->depth.image);
    assert(!err);

    /* get memory requirements for this object */
    vkGetImageMemoryRequirements(demo->device, demo->depth.image, &mem_reqs);

    /* select memory size and type (no host-visibility needed: only the GPU
     * touches the depth buffer) */
    mem_alloc.allocationSize = mem_reqs.size;
    pass = memory_type_from_properties(demo, mem_reqs.memoryTypeBits,
                                       0, /* No requirements */
                                       &mem_alloc.memoryTypeIndex);
    assert(pass);

    /* allocate memory */
    err = vkAllocateMemory(demo->device, &mem_alloc, NULL, &demo->depth.mem);
    assert(!err);

    /* bind memory */
    err =
        vkBindImageMemory(demo->device, demo->depth.image, demo->depth.mem, 0);
    assert(!err);

    /* UNDEFINED -> DEPTH_STENCIL_ATTACHMENT_OPTIMAL; initial contents are
     * irrelevant since the render pass clears the attachment on load */
    demo_set_image_layout(demo, demo->depth.image, VK_IMAGE_ASPECT_DEPTH_BIT,
                          VK_IMAGE_LAYOUT_UNDEFINED,
                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);

    /* create image view */
    view.image = demo->depth.image;
    err = vkCreateImageView(demo->device, &view, NULL, &demo->depth.view);
    assert(!err);
}
/*
 * Create a 2x2 BGRA texture image with the given tiling/usage, allocate and
 * bind memory for it, and — only when the memory is HOST_VISIBLE — fill it
 * with a checkerboard of tex_colors[0]/tex_colors[1] through a mapping.
 * For non-host-visible images the caller is expected to copy data in
 * (staging path in demo_prepare_textures).
 *
 * On return tex_obj holds the image, its memory, its dimensions, and
 * imageLayout == SHADER_READ_ONLY_OPTIMAL.
 */
static void
demo_prepare_texture_image(struct demo *demo, const uint32_t *tex_colors,
                           struct texture_object *tex_obj, VkImageTiling tiling,
                           VkImageUsageFlags usage, VkFlags required_props) {
    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
    const int32_t tex_width = 2;
    const int32_t tex_height = 2;
    VkResult U_ASSERT_ONLY err;
    bool U_ASSERT_ONLY pass;

    tex_obj->tex_width = tex_width;
    tex_obj->tex_height = tex_height;

    const VkImageCreateInfo image_create_info = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
        .pNext = NULL,
        .imageType = VK_IMAGE_TYPE_2D,
        .format = tex_format,
        .extent = {tex_width, tex_height, 1},
        .mipLevels = 1,
        .arrayLayers = 1,
        .samples = VK_SAMPLE_COUNT_1_BIT,
        .tiling = tiling,
        .usage = usage,
        .flags = 0,
    };
    /* filled in after the memory requirements query below */
    VkMemoryAllocateInfo mem_alloc = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = NULL,
        .allocationSize = 0,
        .memoryTypeIndex = 0,
    };
    VkMemoryRequirements mem_reqs;

    err =
        vkCreateImage(demo->device, &image_create_info, NULL, &tex_obj->image);
    assert(!err);

    vkGetImageMemoryRequirements(demo->device, tex_obj->image, &mem_reqs);

    mem_alloc.allocationSize = mem_reqs.size;
    pass =
        memory_type_from_properties(demo, mem_reqs.memoryTypeBits,
                                    required_props, &mem_alloc.memoryTypeIndex);
    assert(pass);

    /* allocate memory */
    err = vkAllocateMemory(demo->device, &mem_alloc, NULL, &tex_obj->mem);
    assert(!err);

    /* bind memory */
    err = vkBindImageMemory(demo->device, tex_obj->image, tex_obj->mem, 0);
    assert(!err);

    if (required_props & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
        const VkImageSubresource subres = {
            .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
            .mipLevel = 0,
            .arrayLayer = 0,
        };
        VkSubresourceLayout layout;
        void *data;
        int32_t x, y;

        /* rowPitch from the subresource layout accounts for any padding the
         * implementation adds between rows of a linear image */
        vkGetImageSubresourceLayout(demo->device, tex_obj->image, &subres,
                                    &layout);

        err = vkMapMemory(demo->device, tex_obj->mem, 0,
                          mem_alloc.allocationSize, 0, &data);
        assert(!err);

        /* (x & 1) ^ (y & 1) alternates 0/1 per texel -> 2x2 checkerboard */
        for (y = 0; y < tex_height; y++) {
            uint32_t *row = (uint32_t *)((char *)data + layout.rowPitch * y);
            for (x = 0; x < tex_width; x++)
                row[x] = tex_colors[(x & 1) ^ (y & 1)];
        }

        vkUnmapMemory(demo->device, tex_obj->mem);
    }

    /* NOTE(review): oldLayout UNDEFINED permits the driver to discard the
     * texel data just written above; later versions of this demo use
     * VK_IMAGE_LAYOUT_PREINITIALIZED for the host-filled case — verify. */
    tex_obj->imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    demo_set_image_layout(demo, tex_obj->image, VK_IMAGE_ASPECT_COLOR_BIT,
                          VK_IMAGE_LAYOUT_UNDEFINED, tex_obj->imageLayout);
    /* setting the image layout does not reference the actual memory so no need
     * to add a mem ref */
}
  876. static void demo_destroy_texture_image(struct demo *demo,
  877. struct texture_object *tex_obj) {
  878. /* clean up staging resources */
  879. vkDestroyImage(demo->device, tex_obj->image, NULL);
  880. vkFreeMemory(demo->device, tex_obj->mem, NULL);
  881. }
/*
 * Create every demo texture (a 2-color checkerboard per entry), plus its
 * sampler and image view.
 *
 * Two paths, chosen per the device's format features:
 *  - linear path: the device can sample linear-tiled B8G8R8A8 images, so the
 *    texture is created host-visible and filled directly;
 *  - staging path: fill a linear host-visible staging image, then
 *    vkCmdCopyImage it into an optimal-tiled device-local image via the
 *    setup command buffer.
 */
static void demo_prepare_textures(struct demo *demo) {
    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
    VkFormatProperties props;
    /* two alternating texel colors per texture (checkerboard pattern) */
    const uint32_t tex_colors[DEMO_TEXTURE_COUNT][2] = {
        {0xffff0000, 0xff00ff00},
    };
    uint32_t i;
    VkResult U_ASSERT_ONLY err;

    vkGetPhysicalDeviceFormatProperties(demo->gpu, tex_format, &props);

    for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
        if ((props.linearTilingFeatures &
             VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) &&
            !demo->use_staging_buffer) {
            /* Device can texture using linear textures */
            demo_prepare_texture_image(demo, tex_colors[i], &demo->textures[i],
                                       VK_IMAGE_TILING_LINEAR,
                                       VK_IMAGE_USAGE_SAMPLED_BIT,
                                       VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
        } else if (props.optimalTilingFeatures &
                   VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) {
            /* Must use staging buffer to copy linear texture to optimized */
            struct texture_object staging_texture;

            memset(&staging_texture, 0, sizeof(staging_texture));
            /* host-visible, linear source filled with the checkerboard */
            demo_prepare_texture_image(demo, tex_colors[i], &staging_texture,
                                       VK_IMAGE_TILING_LINEAR,
                                       VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
                                       VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);

            /* device-local, optimal-tiled destination (left unfilled; data
             * arrives via the copy below) */
            demo_prepare_texture_image(
                demo, tex_colors[i], &demo->textures[i],
                VK_IMAGE_TILING_OPTIMAL,
                (VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),
                VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);

            /* move both images into the layouts vkCmdCopyImage requires */
            demo_set_image_layout(demo, staging_texture.image,
                                  VK_IMAGE_ASPECT_COLOR_BIT,
                                  staging_texture.imageLayout,
                                  VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

            demo_set_image_layout(demo, demo->textures[i].image,
                                  VK_IMAGE_ASPECT_COLOR_BIT,
                                  demo->textures[i].imageLayout,
                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

            VkImageCopy copy_region = {
                .srcSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
                .srcOffset = {0, 0, 0},
                .dstSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
                .dstOffset = {0, 0, 0},
                .extent = {staging_texture.tex_width,
                           staging_texture.tex_height, 1},
            };
            /* recorded into the setup command buffer; demo_set_image_layout
             * above has already begun it */
            vkCmdCopyImage(
                demo->setup_cmd, staging_texture.image,
                VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, demo->textures[i].image,
                VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy_region);

            /* back to the shader-readable layout recorded in imageLayout */
            demo_set_image_layout(demo, demo->textures[i].image,
                                  VK_IMAGE_ASPECT_COLOR_BIT,
                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                  demo->textures[i].imageLayout);

            /* submit and wait so the staging image can be destroyed safely */
            demo_flush_init_cmd(demo);

            demo_destroy_texture_image(demo, &staging_texture);
        } else {
            /* Can't support VK_FORMAT_B8G8R8A8_UNORM !? */
            assert(!"No support for B8G8R8A8_UNORM as texture image format");
        }

        /* nearest-filtered, repeating, no mipmaps/anisotropy — matches the
         * single-mip 2x2 texture created above */
        const VkSamplerCreateInfo sampler = {
            .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
            .pNext = NULL,
            .magFilter = VK_FILTER_NEAREST,
            .minFilter = VK_FILTER_NEAREST,
            .mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST,
            .addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .mipLodBias = 0.0f,
            .anisotropyEnable = VK_FALSE,
            .maxAnisotropy = 1,
            .compareOp = VK_COMPARE_OP_NEVER,
            .minLod = 0.0f,
            .maxLod = 0.0f,
            .borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
            .unnormalizedCoordinates = VK_FALSE,
        };
        VkImageViewCreateInfo view = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
            .pNext = NULL,
            .image = VK_NULL_HANDLE, /* patched below */
            .viewType = VK_IMAGE_VIEW_TYPE_2D,
            .format = tex_format,
            .components =
                {
                    VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G,
                    VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A,
                },
            .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
            .flags = 0,
        };

        /* create sampler */
        err = vkCreateSampler(demo->device, &sampler, NULL,
                              &demo->textures[i].sampler);
        assert(!err);

        /* create image view */
        view.image = demo->textures[i].image;
        err = vkCreateImageView(demo->device, &view, NULL,
                                &demo->textures[i].view);
        assert(!err);
    }
}
  987. static void demo_prepare_vertices(struct demo *demo) {
  988. // clang-format off
  989. const float vb[3][5] = {
  990. /* position texcoord */
  991. { -1.0f, -1.0f, 0.25f, 0.0f, 0.0f },
  992. { 1.0f, -1.0f, 0.25f, 1.0f, 0.0f },
  993. { 0.0f, 1.0f, 1.0f, 0.5f, 1.0f },
  994. };
  995. // clang-format on
  996. const VkBufferCreateInfo buf_info = {
  997. .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
  998. .pNext = NULL,
  999. .size = sizeof(vb),
  1000. .usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
  1001. .flags = 0,
  1002. };
  1003. VkMemoryAllocateInfo mem_alloc = {
  1004. .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
  1005. .pNext = NULL,
  1006. .allocationSize = 0,
  1007. .memoryTypeIndex = 0,
  1008. };
  1009. VkMemoryRequirements mem_reqs;
  1010. VkResult U_ASSERT_ONLY err;
  1011. bool U_ASSERT_ONLY pass;
  1012. void *data;
  1013. memset(&demo->vertices, 0, sizeof(demo->vertices));
  1014. err = vkCreateBuffer(demo->device, &buf_info, NULL, &demo->vertices.buf);
  1015. assert(!err);
  1016. vkGetBufferMemoryRequirements(demo->device, demo->vertices.buf, &mem_reqs);
  1017. assert(!err);
  1018. mem_alloc.allocationSize = mem_reqs.size;
  1019. pass = memory_type_from_properties(demo, mem_reqs.memoryTypeBits,
  1020. VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
  1021. &mem_alloc.memoryTypeIndex);
  1022. assert(pass);
  1023. err = vkAllocateMemory(demo->device, &mem_alloc, NULL, &demo->vertices.mem);
  1024. assert(!err);
  1025. err = vkMapMemory(demo->device, demo->vertices.mem, 0,
  1026. mem_alloc.allocationSize, 0, &data);
  1027. assert(!err);
  1028. memcpy(data, vb, sizeof(vb));
  1029. vkUnmapMemory(demo->device, demo->vertices.mem);
  1030. err = vkBindBufferMemory(demo->device, demo->vertices.buf,
  1031. demo->vertices.mem, 0);
  1032. assert(!err);
  1033. demo->vertices.vi.sType =
  1034. VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
  1035. demo->vertices.vi.pNext = NULL;
  1036. demo->vertices.vi.vertexBindingDescriptionCount = 1;
  1037. demo->vertices.vi.pVertexBindingDescriptions = demo->vertices.vi_bindings;
  1038. demo->vertices.vi.vertexAttributeDescriptionCount = 2;
  1039. demo->vertices.vi.pVertexAttributeDescriptions = demo->vertices.vi_attrs;
  1040. demo->vertices.vi_bindings[0].binding = VERTEX_BUFFER_BIND_ID;
  1041. demo->vertices.vi_bindings[0].stride = sizeof(vb[0]);
  1042. demo->vertices.vi_bindings[0].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
  1043. demo->vertices.vi_attrs[0].binding = VERTEX_BUFFER_BIND_ID;
  1044. demo->vertices.vi_attrs[0].location = 0;
  1045. demo->vertices.vi_attrs[0].format = VK_FORMAT_R32G32B32_SFLOAT;
  1046. demo->vertices.vi_attrs[0].offset = 0;
  1047. demo->vertices.vi_attrs[1].binding = VERTEX_BUFFER_BIND_ID;
  1048. demo->vertices.vi_attrs[1].location = 1;
  1049. demo->vertices.vi_attrs[1].format = VK_FORMAT_R32G32_SFLOAT;
  1050. demo->vertices.vi_attrs[1].offset = sizeof(float) * 3;
  1051. }
/*
 * Create the descriptor set layout (a single fragment-stage binding holding
 * DEMO_TEXTURE_COUNT combined image/samplers) and the pipeline layout that
 * references it.
 */
static void demo_prepare_descriptor_layout(struct demo *demo) {
    const VkDescriptorSetLayoutBinding layout_binding = {
        .binding = 0,
        .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
        .descriptorCount = DEMO_TEXTURE_COUNT,
        .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT,
        .pImmutableSamplers = NULL,
    };
    const VkDescriptorSetLayoutCreateInfo descriptor_layout = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
        .pNext = NULL,
        .bindingCount = 1,
        .pBindings = &layout_binding,
    };
    VkResult U_ASSERT_ONLY err;

    err = vkCreateDescriptorSetLayout(demo->device, &descriptor_layout, NULL,
                                      &demo->desc_layout);
    assert(!err);

    /* one set layout, no push constants */
    const VkPipelineLayoutCreateInfo pPipelineLayoutCreateInfo = {
        .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
        .pNext = NULL,
        .setLayoutCount = 1,
        .pSetLayouts = &demo->desc_layout,
    };

    err = vkCreatePipelineLayout(demo->device, &pPipelineLayoutCreateInfo, NULL,
                                 &demo->pipeline_layout);
    assert(!err);
}
/*
 * Create the single-subpass render pass: attachment 0 is the swapchain
 * color image (cleared on load, stored), attachment 1 is the depth buffer
 * (cleared on load, discarded after the pass).  Initial and final layouts
 * match the layouts the demo keeps the images in between frames, so no
 * implicit transitions are introduced.
 */
static void demo_prepare_render_pass(struct demo *demo) {
    const VkAttachmentDescription attachments[2] = {
        [0] =
            {
                .format = demo->format,
                .samples = VK_SAMPLE_COUNT_1_BIT,
                .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
                .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
                .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
                .initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                .finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
            },
        [1] =
            {
                .format = demo->depth.format,
                .samples = VK_SAMPLE_COUNT_1_BIT,
                .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
                /* depth contents are not needed after the pass */
                .storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
                .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
                .initialLayout =
                    VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                .finalLayout =
                    VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
            },
    };
    const VkAttachmentReference color_reference = {
        .attachment = 0, .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
    };
    const VkAttachmentReference depth_reference = {
        .attachment = 1,
        .layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
    };
    const VkSubpassDescription subpass = {
        .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
        .flags = 0,
        .inputAttachmentCount = 0,
        .pInputAttachments = NULL,
        .colorAttachmentCount = 1,
        .pColorAttachments = &color_reference,
        .pResolveAttachments = NULL,
        .pDepthStencilAttachment = &depth_reference,
        .preserveAttachmentCount = 0,
        .pPreserveAttachments = NULL,
    };
    const VkRenderPassCreateInfo rp_info = {
        .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
        .pNext = NULL,
        .attachmentCount = 2,
        .pAttachments = attachments,
        .subpassCount = 1,
        .pSubpasses = &subpass,
        .dependencyCount = 0,
        .pDependencies = NULL,
    };
    VkResult U_ASSERT_ONLY err;

    err = vkCreateRenderPass(demo->device, &rp_info, NULL, &demo->render_pass);
    assert(!err);
}
  1140. static VkShaderModule
  1141. demo_prepare_shader_module(struct demo *demo, const void *code, size_t size) {
  1142. VkShaderModuleCreateInfo moduleCreateInfo;
  1143. VkShaderModule module;
  1144. VkResult U_ASSERT_ONLY err;
  1145. moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
  1146. moduleCreateInfo.pNext = NULL;
  1147. moduleCreateInfo.codeSize = size;
  1148. moduleCreateInfo.pCode = code;
  1149. moduleCreateInfo.flags = 0;
  1150. err = vkCreateShaderModule(demo->device, &moduleCreateInfo, NULL, &module);
  1151. assert(!err);
  1152. return module;
  1153. }
  1154. static VkShaderModule demo_prepare_vs(struct demo *demo) {
  1155. size_t size = sizeof(vertShaderCode);
  1156. demo->vert_shader_module =
  1157. demo_prepare_shader_module(demo, vertShaderCode, size);
  1158. return demo->vert_shader_module;
  1159. }
  1160. static VkShaderModule demo_prepare_fs(struct demo *demo) {
  1161. size_t size = sizeof(fragShaderCode);
  1162. demo->frag_shader_module =
  1163. demo_prepare_shader_module(demo, fragShaderCode, size);
  1164. return demo->frag_shader_module;
  1165. }
/*
 * Build the one graphics pipeline the demo uses: triangle list, back-face
 * culling, depth test LESS_OR_EQUAL, no blending, dynamic viewport/scissor.
 * The pipeline cache and both shader modules are destroyed at the end —
 * they are only needed during pipeline creation.
 */
static void demo_prepare_pipeline(struct demo *demo) {
    VkGraphicsPipelineCreateInfo pipeline;
    VkPipelineCacheCreateInfo pipelineCache;
    VkPipelineVertexInputStateCreateInfo vi;
    VkPipelineInputAssemblyStateCreateInfo ia;
    VkPipelineRasterizationStateCreateInfo rs;
    VkPipelineColorBlendStateCreateInfo cb;
    VkPipelineDepthStencilStateCreateInfo ds;
    VkPipelineViewportStateCreateInfo vp;
    VkPipelineMultisampleStateCreateInfo ms;
    VkDynamicState dynamicStateEnables[VK_DYNAMIC_STATE_RANGE_SIZE];
    VkPipelineDynamicStateCreateInfo dynamicState;
    VkResult U_ASSERT_ONLY err;

    /* dynamic state list is appended to as states are enabled below */
    memset(dynamicStateEnables, 0, sizeof dynamicStateEnables);
    memset(&dynamicState, 0, sizeof dynamicState);
    dynamicState.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
    dynamicState.pDynamicStates = dynamicStateEnables;

    memset(&pipeline, 0, sizeof(pipeline));
    pipeline.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
    pipeline.layout = demo->pipeline_layout;

    /* vertex input layout was filled in by demo_prepare_vertices */
    vi = demo->vertices.vi;

    memset(&ia, 0, sizeof(ia));
    ia.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
    ia.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;

    memset(&rs, 0, sizeof(rs));
    rs.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
    rs.polygonMode = VK_POLYGON_MODE_FILL;
    rs.cullMode = VK_CULL_MODE_BACK_BIT;
    rs.frontFace = VK_FRONT_FACE_CLOCKWISE;
    rs.depthClampEnable = VK_FALSE;
    rs.rasterizerDiscardEnable = VK_FALSE;
    rs.depthBiasEnable = VK_FALSE;

    /* single color attachment, blending disabled, all channels written */
    memset(&cb, 0, sizeof(cb));
    cb.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
    VkPipelineColorBlendAttachmentState att_state[1];
    memset(att_state, 0, sizeof(att_state));
    att_state[0].colorWriteMask = 0xf;
    att_state[0].blendEnable = VK_FALSE;
    cb.attachmentCount = 1;
    cb.pAttachments = att_state;

    /* viewport and scissor are set per-frame via dynamic state, so only
     * their counts matter here */
    memset(&vp, 0, sizeof(vp));
    vp.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
    vp.viewportCount = 1;
    dynamicStateEnables[dynamicState.dynamicStateCount++] =
        VK_DYNAMIC_STATE_VIEWPORT;
    vp.scissorCount = 1;
    dynamicStateEnables[dynamicState.dynamicStateCount++] =
        VK_DYNAMIC_STATE_SCISSOR;

    memset(&ds, 0, sizeof(ds));
    ds.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
    ds.depthTestEnable = VK_TRUE;
    ds.depthWriteEnable = VK_TRUE;
    ds.depthCompareOp = VK_COMPARE_OP_LESS_OR_EQUAL;
    ds.depthBoundsTestEnable = VK_FALSE;
    /* stencil disabled, but both faces get valid no-op state */
    ds.back.failOp = VK_STENCIL_OP_KEEP;
    ds.back.passOp = VK_STENCIL_OP_KEEP;
    ds.back.compareOp = VK_COMPARE_OP_ALWAYS;
    ds.stencilTestEnable = VK_FALSE;
    ds.front = ds.back;

    memset(&ms, 0, sizeof(ms));
    ms.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
    ms.pSampleMask = NULL;
    ms.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;

    // Two stages: vs and fs
    pipeline.stageCount = 2;
    VkPipelineShaderStageCreateInfo shaderStages[2];
    memset(&shaderStages, 0, 2 * sizeof(VkPipelineShaderStageCreateInfo));

    shaderStages[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    shaderStages[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
    shaderStages[0].module = demo_prepare_vs(demo);
    shaderStages[0].pName = "main";

    shaderStages[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    shaderStages[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
    shaderStages[1].module = demo_prepare_fs(demo);
    shaderStages[1].pName = "main";

    pipeline.pVertexInputState = &vi;
    pipeline.pInputAssemblyState = &ia;
    pipeline.pRasterizationState = &rs;
    pipeline.pColorBlendState = &cb;
    pipeline.pMultisampleState = &ms;
    pipeline.pViewportState = &vp;
    pipeline.pDepthStencilState = &ds;
    pipeline.pStages = shaderStages;
    pipeline.renderPass = demo->render_pass;
    pipeline.pDynamicState = &dynamicState;

    memset(&pipelineCache, 0, sizeof(pipelineCache));
    pipelineCache.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;

    err = vkCreatePipelineCache(demo->device, &pipelineCache, NULL,
                                &demo->pipelineCache);
    assert(!err);
    err = vkCreateGraphicsPipelines(demo->device, demo->pipelineCache, 1,
                                    &pipeline, NULL, &demo->pipeline);
    assert(!err);

    /* cache and shader modules are no longer needed once the pipeline is
     * built */
    vkDestroyPipelineCache(demo->device, demo->pipelineCache, NULL);

    vkDestroyShaderModule(demo->device, demo->frag_shader_module, NULL);
    vkDestroyShaderModule(demo->device, demo->vert_shader_module, NULL);
}
  1263. static void demo_prepare_descriptor_pool(struct demo *demo) {
  1264. const VkDescriptorPoolSize type_count = {
  1265. .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
  1266. .descriptorCount = DEMO_TEXTURE_COUNT,
  1267. };
  1268. const VkDescriptorPoolCreateInfo descriptor_pool = {
  1269. .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
  1270. .pNext = NULL,
  1271. .maxSets = 1,
  1272. .poolSizeCount = 1,
  1273. .pPoolSizes = &type_count,
  1274. };
  1275. VkResult U_ASSERT_ONLY err;
  1276. err = vkCreateDescriptorPool(demo->device, &descriptor_pool, NULL,
  1277. &demo->desc_pool);
  1278. assert(!err);
  1279. }
/*
 * Allocate the demo's single descriptor set from desc_pool and point its
 * combined image/sampler array at the prepared textures.
 */
static void demo_prepare_descriptor_set(struct demo *demo) {
    VkDescriptorImageInfo tex_descs[DEMO_TEXTURE_COUNT];
    VkWriteDescriptorSet write;
    VkResult U_ASSERT_ONLY err;
    uint32_t i;

    VkDescriptorSetAllocateInfo alloc_info = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
        .pNext = NULL,
        .descriptorPool = demo->desc_pool,
        .descriptorSetCount = 1,
        .pSetLayouts = &demo->desc_layout};
    err = vkAllocateDescriptorSets(demo->device, &alloc_info, &demo->desc_set);
    assert(!err);

    memset(&tex_descs, 0, sizeof(tex_descs));
    for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
        tex_descs[i].sampler = demo->textures[i].sampler;
        tex_descs[i].imageView = demo->textures[i].view;
        /* NOTE(review): GENERAL here, although the textures were
         * transitioned to SHADER_READ_ONLY_OPTIMAL in
         * demo_prepare_texture_image — confirm intended. */
        tex_descs[i].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
    }

    /* one write covering the whole binding-0 descriptor array */
    memset(&write, 0, sizeof(write));
    write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write.dstSet = demo->desc_set;
    write.descriptorCount = DEMO_TEXTURE_COUNT;
    write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
    write.pImageInfo = tex_descs;

    vkUpdateDescriptorSets(demo->device, 1, &write, 0, NULL);
}
/*
 * Create one framebuffer per swapchain image.  The depth view (slot 1) is
 * shared by every framebuffer; slot 0 is patched per iteration — fb_info
 * only holds a pointer to the attachments array, so mutating the array
 * between vkCreateFramebuffer calls is safe.
 */
static void demo_prepare_framebuffers(struct demo *demo) {
    VkImageView attachments[2];
    attachments[1] = demo->depth.view;

    const VkFramebufferCreateInfo fb_info = {
        .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
        .pNext = NULL,
        .renderPass = demo->render_pass,
        .attachmentCount = 2,
        .pAttachments = attachments,
        .width = demo->width,
        .height = demo->height,
        .layers = 1,
    };
    VkResult U_ASSERT_ONLY err;
    uint32_t i;

    demo->framebuffers = (VkFramebuffer *)malloc(demo->swapchainImageCount *
                                                 sizeof(VkFramebuffer));
    assert(demo->framebuffers);

    for (i = 0; i < demo->swapchainImageCount; i++) {
        attachments[0] = demo->buffers[i].view;
        err = vkCreateFramebuffer(demo->device, &fb_info, NULL,
                                  &demo->framebuffers[i]);
        assert(!err);
    }
}
/*
 * Top-level resource setup: create the command pool and the per-frame draw
 * command buffer, then build every rendering resource in dependency order
 * (swapchain buffers -> depth -> textures -> vertices -> layouts ->
 * render pass -> pipeline -> descriptors -> framebuffers).
 */
static void demo_prepare(struct demo *demo) {
    VkResult U_ASSERT_ONLY err;

    const VkCommandPoolCreateInfo cmd_pool_info = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
        .pNext = NULL,
        .queueFamilyIndex = demo->graphics_queue_node_index,
        /* allow the draw command buffer to be re-recorded each frame */
        .flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
    };
    err = vkCreateCommandPool(demo->device, &cmd_pool_info, NULL,
                              &demo->cmd_pool);
    assert(!err);

    const VkCommandBufferAllocateInfo cmd = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
        .pNext = NULL,
        .commandPool = demo->cmd_pool,
        .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
        .commandBufferCount = 1,
    };
    err = vkAllocateCommandBuffers(demo->device, &cmd, &demo->draw_cmd);
    assert(!err);

    demo_prepare_buffers(demo);
    demo_prepare_depth(demo);
    demo_prepare_textures(demo);
    demo_prepare_vertices(demo);
    demo_prepare_descriptor_layout(demo);
    demo_prepare_render_pass(demo);
    demo_prepare_pipeline(demo);

    demo_prepare_descriptor_pool(demo);
    demo_prepare_descriptor_set(demo);

    demo_prepare_framebuffers(demo);
}
  1363. static void demo_error_callback(int error, const char* description) {
  1364. printf("GLFW error: %s\n", description);
  1365. fflush(stdout);
  1366. }
  1367. static void demo_key_callback(GLFWwindow* window, int key, int scancode, int action, int mods) {
  1368. if (key == GLFW_KEY_ESCAPE && action == GLFW_RELEASE)
  1369. glfwSetWindowShouldClose(window, GLFW_TRUE);
  1370. }
  1371. static void demo_refresh_callback(GLFWwindow* window) {
  1372. struct demo* demo = glfwGetWindowUserPointer(window);
  1373. demo_draw(demo);
  1374. }
  1375. static void demo_resize_callback(GLFWwindow* window, int width, int height) {
  1376. struct demo* demo = glfwGetWindowUserPointer(window);
  1377. demo->width = width;
  1378. demo->height = height;
  1379. demo_resize(demo);
  1380. }
/*
 * Main loop: poll input, draw a frame, and oscillate depthStencil between
 * 0.8 and 0.99 by +/-0.001 per frame.  vkDeviceWaitIdle after every frame
 * is a deliberate simplification (full CPU/GPU serialization) so the
 * per-frame state can be updated without finer-grained synchronization.
 */
static void demo_run(struct demo *demo) {
    while (!glfwWindowShouldClose(demo->window)) {
        glfwPollEvents();

        demo_draw(demo);

        /* reverse direction at the bounds of the [0.8, 0.99] range */
        if (demo->depthStencil > 0.99f)
            demo->depthIncrement = -0.001f;
        if (demo->depthStencil < 0.8f)
            demo->depthIncrement = 0.001f;

        demo->depthStencil += demo->depthIncrement;

        // Wait for work to finish before updating MVP.
        vkDeviceWaitIdle(demo->device);
    }
}
/*
 * Create the GLFW window (GLFW_NO_API: no GL context — presentation goes
 * through Vulkan), store the demo pointer for the callbacks, and install
 * the refresh/resize/key handlers.  Exits the process if window creation
 * fails.
 */
static void demo_create_window(struct demo *demo) {
    glfwWindowHint(GLFW_CLIENT_API, GLFW_NO_API);

    demo->window = glfwCreateWindow(demo->width,
                                    demo->height,
                                    APP_LONG_NAME,
                                    NULL,
                                    NULL);
    if (!demo->window) {
        // It didn't work, so try to give a useful error:
        printf("Cannot create a window in which to draw!\n");
        fflush(stdout);
        exit(1);
    }

    /* callbacks retrieve the demo via glfwGetWindowUserPointer */
    glfwSetWindowUserPointer(demo->window, demo);
    glfwSetWindowRefreshCallback(demo->window, demo_refresh_callback);
    glfwSetFramebufferSizeCallback(demo->window, demo_resize_callback);
    glfwSetKeyCallback(demo->window, demo_key_callback);
}
  1412. /*
  1413. * Return 1 (true) if all layer names specified in check_names
  1414. * can be found in given layer properties.
  1415. */
  1416. static VkBool32 demo_check_layers(uint32_t check_count, char **check_names,
  1417. uint32_t layer_count,
  1418. VkLayerProperties *layers) {
  1419. uint32_t i, j;
  1420. for (i = 0; i < check_count; i++) {
  1421. VkBool32 found = 0;
  1422. for (j = 0; j < layer_count; j++) {
  1423. if (!strcmp(check_names[i], layers[j].layerName)) {
  1424. found = 1;
  1425. break;
  1426. }
  1427. }
  1428. if (!found) {
  1429. fprintf(stderr, "Cannot find layer: %s\n", check_names[i]);
  1430. return 0;
  1431. }
  1432. }
  1433. return 1;
  1434. }
  1435. VKAPI_ATTR void *VKAPI_CALL myrealloc(void *pUserData, void *pOriginal,
  1436. size_t size, size_t alignment,
  1437. VkSystemAllocationScope allocationScope) {
  1438. return realloc(pOriginal, size);
  1439. }
  1440. VKAPI_ATTR void *VKAPI_CALL myalloc(void *pUserData, size_t size,
  1441. size_t alignment,
  1442. VkSystemAllocationScope allocationScope) {
  1443. #ifdef _MSC_VER
  1444. return _aligned_malloc(size, alignment);
  1445. #else
  1446. return aligned_alloc(alignment, size);
  1447. #endif
  1448. }
/*
 * Vulkan host free callback (PFN_vkFreeFunction): releases memory that
 * myalloc/myrealloc handed out, using the matching deallocator per
 * platform (_aligned_free pairs with _aligned_malloc on MSVC).
 */
VKAPI_ATTR void VKAPI_CALL myfree(void *pUserData, void *pMemory) {
#ifdef _MSC_VER
    _aligned_free(pMemory);
#else
    free(pMemory);
#endif
}
/*
 * Create the Vulkan instance and select a physical device.
 *
 * In order: optionally verify/enable instance validation layers, gather
 * the GLFW-required instance extensions (plus debug-report when
 * validating), create the instance with the demo's custom host
 * allocator, grab the first physical device, check device validation
 * layers and the swapchain device extension, install the debug-report
 * callback, resolve instance-level WSI entry points, and cache the
 * GPU's queue family properties.  Exits via ERR_EXIT on any failure.
 */
static void demo_init_vk(struct demo *demo) {
    VkResult err;
    uint32_t required_extension_count;
    const char** required_extensions;
    uint32_t i;
    uint32_t instance_extension_count = 0;
    uint32_t instance_layer_count = 0;
    uint32_t device_validation_layer_count = 0;
    demo->enabled_extension_count = 0;
    demo->enabled_layer_count = 0;
    // Layers requested when --validate is in effect (instance and device
    // lists are kept in sync here).
    char *instance_validation_layers[] = {
        "VK_LAYER_LUNARG_mem_tracker",
        "VK_LAYER_GOOGLE_unique_objects",
    };
    demo->device_validation_layers[0] = "VK_LAYER_LUNARG_mem_tracker";
    demo->device_validation_layers[1] = "VK_LAYER_GOOGLE_unique_objects";
    device_validation_layer_count = 2;
    /* Look for validation layers */
    VkBool32 validation_found = 0;
    err = vkEnumerateInstanceLayerProperties(&instance_layer_count, NULL);
    assert(!err);
    if (instance_layer_count > 0) {
        VkLayerProperties *instance_layers =
            malloc(sizeof(VkLayerProperties) * instance_layer_count);
        err = vkEnumerateInstanceLayerProperties(&instance_layer_count,
                                                 instance_layers);
        assert(!err);
        if (demo->validate) {
            validation_found = demo_check_layers(
                ARRAY_SIZE(instance_validation_layers),
                instance_validation_layers, instance_layer_count,
                instance_layers);
            demo->enabled_layer_count = ARRAY_SIZE(instance_validation_layers);
        }
        free(instance_layers);
    }
    if (demo->validate && !validation_found) {
        ERR_EXIT("vkEnumerateInstanceLayerProperties failed to find"
                 "required validation layer.\n\n"
                 "Please look at the Getting Started guide for additional "
                 "information.\n",
                 "vkCreateInstance Failure");
    }
    /* Look for instance extensions */
    // GLFW reports the platform's surface extensions (e.g. VK_KHR_surface
    // plus the window-system-specific one).
    required_extensions = glfwGetRequiredInstanceExtensions(&required_extension_count);
    if (!required_extensions) {
        ERR_EXIT("glfwGetRequiredInstanceExtensions failed to find the "
                 "platform surface extensions.\n\nDo you have a compatible "
                 "Vulkan installable client driver (ICD) installed?\nPlease "
                 "look at the Getting Started guide for additional "
                 "information.\n",
                 "vkCreateInstance Failure");
    }
    for (i = 0; i < required_extension_count; i++) {
        demo->extension_names[demo->enabled_extension_count++] = required_extensions[i];
        // Guard against overflowing the fixed-size extension_names array.
        assert(demo->enabled_extension_count < 64);
    }
    err = vkEnumerateInstanceExtensionProperties(
        NULL, &instance_extension_count, NULL);
    assert(!err);
    if (instance_extension_count > 0) {
        VkExtensionProperties *instance_extensions =
            malloc(sizeof(VkExtensionProperties) * instance_extension_count);
        err = vkEnumerateInstanceExtensionProperties(
            NULL, &instance_extension_count, instance_extensions);
        assert(!err);
        for (i = 0; i < instance_extension_count; i++) {
            // Enable the debug-report extension only when validating.
            if (!strcmp(VK_EXT_DEBUG_REPORT_EXTENSION_NAME,
                        instance_extensions[i].extensionName)) {
                if (demo->validate) {
                    demo->extension_names[demo->enabled_extension_count++] =
                        VK_EXT_DEBUG_REPORT_EXTENSION_NAME;
                }
            }
            assert(demo->enabled_extension_count < 64);
        }
        free(instance_extensions);
    }
    const VkApplicationInfo app = {
        .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
        .pNext = NULL,
        .pApplicationName = APP_SHORT_NAME,
        .applicationVersion = 0,
        .pEngineName = APP_SHORT_NAME,
        .engineVersion = 0,
        .apiVersion = VK_API_VERSION_1_0,
    };
    VkInstanceCreateInfo inst_info = {
        .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .pNext = NULL,
        .pApplicationInfo = &app,
        .enabledLayerCount = demo->enabled_layer_count,
        .ppEnabledLayerNames = (const char *const *)instance_validation_layers,
        .enabledExtensionCount = demo->enabled_extension_count,
        .ppEnabledExtensionNames = (const char *const *)demo->extension_names,
    };
    uint32_t gpu_count;
    // Route instance-scope host allocations through the demo's custom
    // allocator callbacks (myalloc/myfree/myrealloc).
    demo->allocator.pfnAllocation = myalloc;
    demo->allocator.pfnFree = myfree;
    demo->allocator.pfnReallocation = myrealloc;
    err = vkCreateInstance(&inst_info, &demo->allocator, &demo->inst);
    if (err == VK_ERROR_INCOMPATIBLE_DRIVER) {
        ERR_EXIT("Cannot find a compatible Vulkan installable client driver "
                 "(ICD).\n\nPlease look at the Getting Started guide for "
                 "additional information.\n",
                 "vkCreateInstance Failure");
    } else if (err == VK_ERROR_EXTENSION_NOT_PRESENT) {
        ERR_EXIT("Cannot find a specified extension library"
                 ".\nMake sure your layers path is set appropriately\n",
                 "vkCreateInstance Failure");
    } else if (err) {
        ERR_EXIT("vkCreateInstance failed.\n\nDo you have a compatible Vulkan "
                 "installable client driver (ICD) installed?\nPlease look at "
                 "the Getting Started guide for additional information.\n",
                 "vkCreateInstance Failure");
    }
    /* Make initial call to query gpu_count, then second call for gpu info*/
    err = vkEnumeratePhysicalDevices(demo->inst, &gpu_count, NULL);
    assert(!err && gpu_count > 0);
    if (gpu_count > 0) {
        VkPhysicalDevice *physical_devices =
            malloc(sizeof(VkPhysicalDevice) * gpu_count);
        err = vkEnumeratePhysicalDevices(demo->inst, &gpu_count,
                                         physical_devices);
        assert(!err);
        /* For tri demo we just grab the first physical device */
        demo->gpu = physical_devices[0];
        free(physical_devices);
    } else {
        ERR_EXIT("vkEnumeratePhysicalDevices reported zero accessible devices."
                 "\n\nDo you have a compatible Vulkan installable client"
                 " driver (ICD) installed?\nPlease look at the Getting Started"
                 " guide for additional information.\n",
                 "vkEnumeratePhysicalDevices Failure");
    }
    /* Look for validation layers */
    // Repeat the layer check at device scope; enabled_layer_count is
    // reset and now counts device layers for demo_init_device.
    validation_found = 0;
    demo->enabled_layer_count = 0;
    uint32_t device_layer_count = 0;
    err =
        vkEnumerateDeviceLayerProperties(demo->gpu, &device_layer_count, NULL);
    assert(!err);
    if (device_layer_count > 0) {
        VkLayerProperties *device_layers =
            malloc(sizeof(VkLayerProperties) * device_layer_count);
        err = vkEnumerateDeviceLayerProperties(demo->gpu, &device_layer_count,
                                               device_layers);
        assert(!err);
        if (demo->validate) {
            validation_found = demo_check_layers(device_validation_layer_count,
                                                 demo->device_validation_layers,
                                                 device_layer_count,
                                                 device_layers);
            demo->enabled_layer_count = device_validation_layer_count;
        }
        free(device_layers);
    }
    if (demo->validate && !validation_found) {
        ERR_EXIT("vkEnumerateDeviceLayerProperties failed to find "
                 "a required validation layer.\n\n"
                 "Please look at the Getting Started guide for additional "
                 "information.\n",
                 "vkCreateDevice Failure");
    }
    /* Look for device extensions */
    // VK_KHR_swapchain is mandatory for this demo; extension_names is
    // reused (reset) for the device extension list.
    uint32_t device_extension_count = 0;
    VkBool32 swapchainExtFound = 0;
    demo->enabled_extension_count = 0;
    err = vkEnumerateDeviceExtensionProperties(demo->gpu, NULL,
                                               &device_extension_count, NULL);
    assert(!err);
    if (device_extension_count > 0) {
        VkExtensionProperties *device_extensions =
            malloc(sizeof(VkExtensionProperties) * device_extension_count);
        err = vkEnumerateDeviceExtensionProperties(
            demo->gpu, NULL, &device_extension_count, device_extensions);
        assert(!err);
        for (i = 0; i < device_extension_count; i++) {
            if (!strcmp(VK_KHR_SWAPCHAIN_EXTENSION_NAME,
                        device_extensions[i].extensionName)) {
                swapchainExtFound = 1;
                demo->extension_names[demo->enabled_extension_count++] =
                    VK_KHR_SWAPCHAIN_EXTENSION_NAME;
            }
            assert(demo->enabled_extension_count < 64);
        }
        free(device_extensions);
    }
    if (!swapchainExtFound) {
        ERR_EXIT("vkEnumerateDeviceExtensionProperties failed to find "
                 "the " VK_KHR_SWAPCHAIN_EXTENSION_NAME
                 " extension.\n\nDo you have a compatible "
                 "Vulkan installable client driver (ICD) installed?\nPlease "
                 "look at the Getting Started guide for additional "
                 "information.\n",
                 "vkCreateInstance Failure");
    }
    if (demo->validate) {
        // Debug-report entry points are extension functions and must be
        // fetched through vkGetInstanceProcAddr.
        demo->CreateDebugReportCallback =
            (PFN_vkCreateDebugReportCallbackEXT)vkGetInstanceProcAddr(
                demo->inst, "vkCreateDebugReportCallbackEXT");
        if (!demo->CreateDebugReportCallback) {
            ERR_EXIT(
                "GetProcAddr: Unable to find vkCreateDebugReportCallbackEXT\n",
                "vkGetProcAddr Failure");
        }
        VkDebugReportCallbackCreateInfoEXT dbgCreateInfo;
        dbgCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
        dbgCreateInfo.flags =
            VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT;
        dbgCreateInfo.pfnCallback = dbgFunc;
        dbgCreateInfo.pUserData = NULL;
        dbgCreateInfo.pNext = NULL;
        err = demo->CreateDebugReportCallback(demo->inst, &dbgCreateInfo, NULL,
                                              &demo->msg_callback);
        switch (err) {
        case VK_SUCCESS:
            break;
        case VK_ERROR_OUT_OF_HOST_MEMORY:
            ERR_EXIT("CreateDebugReportCallback: out of host memory\n",
                     "CreateDebugReportCallback Failure");
            break;
        default:
            ERR_EXIT("CreateDebugReportCallback: unknown failure\n",
                     "CreateDebugReportCallback Failure");
            break;
        }
    }
    // Having these GIPA queries of device extension entry points both
    // BEFORE and AFTER vkCreateDevice is a good test for the loader
    GET_INSTANCE_PROC_ADDR(demo->inst, GetPhysicalDeviceSurfaceCapabilitiesKHR);
    GET_INSTANCE_PROC_ADDR(demo->inst, GetPhysicalDeviceSurfaceFormatsKHR);
    GET_INSTANCE_PROC_ADDR(demo->inst, GetPhysicalDeviceSurfacePresentModesKHR);
    GET_INSTANCE_PROC_ADDR(demo->inst, GetPhysicalDeviceSurfaceSupportKHR);
    GET_INSTANCE_PROC_ADDR(demo->inst, CreateSwapchainKHR);
    GET_INSTANCE_PROC_ADDR(demo->inst, DestroySwapchainKHR);
    GET_INSTANCE_PROC_ADDR(demo->inst, GetSwapchainImagesKHR);
    GET_INSTANCE_PROC_ADDR(demo->inst, AcquireNextImageKHR);
    GET_INSTANCE_PROC_ADDR(demo->inst, QueuePresentKHR);
    vkGetPhysicalDeviceProperties(demo->gpu, &demo->gpu_props);
    // Query with NULL data to get count
    vkGetPhysicalDeviceQueueFamilyProperties(demo->gpu, &demo->queue_count,
                                             NULL);
    demo->queue_props = (VkQueueFamilyProperties *)malloc(
        demo->queue_count * sizeof(VkQueueFamilyProperties));
    vkGetPhysicalDeviceQueueFamilyProperties(demo->gpu, &demo->queue_count,
                                             demo->queue_props);
    assert(demo->queue_count >= 1);
    // Graphics queue and MemMgr queue can be separate.
    // TODO: Add support for separate queues, including synchronization,
    //       and appropriate tracking for QueueSubmit
}
/*
 * Create the logical device on demo->gpu with a single queue from the
 * graphics queue family chosen earlier, the previously selected device
 * validation layers (when validating) and device extensions, then load
 * the device-level swapchain entry points.
 */
static void demo_init_device(struct demo *demo) {
    VkResult U_ASSERT_ONLY err;
    float queue_priorities[1] = {0.0};
    const VkDeviceQueueCreateInfo queue = {
        .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
        .pNext = NULL,
        .queueFamilyIndex = demo->graphics_queue_node_index,
        .queueCount = 1,
        .pQueuePriorities = queue_priorities};
    VkDeviceCreateInfo device = {
        .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
        .pNext = NULL,
        .queueCreateInfoCount = 1,
        .pQueueCreateInfos = &queue,
        // Device layers were validated and counted in demo_init_vk.
        .enabledLayerCount = demo->enabled_layer_count,
        .ppEnabledLayerNames =
            (const char *const *)((demo->validate)
                                      ? demo->device_validation_layers
                                      : NULL),
        .enabledExtensionCount = demo->enabled_extension_count,
        .ppEnabledExtensionNames = (const char *const *)demo->extension_names,
    };
    err = vkCreateDevice(demo->gpu, &device, NULL, &demo->device);
    assert(!err);
    // Re-fetch the swapchain entry points at device scope (queried at
    // instance scope too in demo_init_vk as a loader exercise).
    GET_DEVICE_PROC_ADDR(demo->device, CreateSwapchainKHR);
    GET_DEVICE_PROC_ADDR(demo->device, DestroySwapchainKHR);
    GET_DEVICE_PROC_ADDR(demo->device, GetSwapchainImagesKHR);
    GET_DEVICE_PROC_ADDR(demo->device, AcquireNextImageKHR);
    GET_DEVICE_PROC_ADDR(demo->device, QueuePresentKHR);
}
/*
 * Create the window surface, pick a queue family that supports both
 * graphics and present (the demo requires a single common queue),
 * create the logical device, and choose the surface color format.
 */
static void demo_init_vk_swapchain(struct demo *demo) {
    VkResult U_ASSERT_ONLY err;
    uint32_t i;
    // Create a WSI surface for the window:
    glfwCreateWindowSurface(demo->inst, demo->window, NULL, &demo->surface);
    // Iterate over each queue to learn whether it supports presenting:
    VkBool32 *supportsPresent =
        (VkBool32 *)malloc(demo->queue_count * sizeof(VkBool32));
    for (i = 0; i < demo->queue_count; i++) {
        demo->fpGetPhysicalDeviceSurfaceSupportKHR(demo->gpu, i, demo->surface,
                                                   &supportsPresent[i]);
    }
    // Search for a graphics and a present queue in the array of queue
    // families, try to find one that supports both
    uint32_t graphicsQueueNodeIndex = UINT32_MAX;
    uint32_t presentQueueNodeIndex = UINT32_MAX;
    for (i = 0; i < demo->queue_count; i++) {
        if ((demo->queue_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
            // Remember the first graphics-capable family as a fallback...
            if (graphicsQueueNodeIndex == UINT32_MAX) {
                graphicsQueueNodeIndex = i;
            }
            // ...but prefer a family that can also present.
            if (supportsPresent[i] == VK_TRUE) {
                graphicsQueueNodeIndex = i;
                presentQueueNodeIndex = i;
                break;
            }
        }
    }
    if (presentQueueNodeIndex == UINT32_MAX) {
        // If didn't find a queue that supports both graphics and present, then
        // find a separate present queue.
        for (i = 0; i < demo->queue_count; ++i) {
            if (supportsPresent[i] == VK_TRUE) {
                presentQueueNodeIndex = i;
                break;
            }
        }
    }
    free(supportsPresent);
    // Generate error if could not find both a graphics and a present queue
    if (graphicsQueueNodeIndex == UINT32_MAX ||
        presentQueueNodeIndex == UINT32_MAX) {
        ERR_EXIT("Could not find a graphics and a present queue\n",
                 "Swapchain Initialization Failure");
    }
    // TODO: Add support for separate queues, including presentation,
    //       synchronization, and appropriate tracking for QueueSubmit.
    // NOTE: While it is possible for an application to use a separate graphics
    //       and a present queues, this demo program assumes it is only using
    //       one:
    if (graphicsQueueNodeIndex != presentQueueNodeIndex) {
        ERR_EXIT("Could not find a common graphics and a present queue\n",
                 "Swapchain Initialization Failure");
    }
    demo->graphics_queue_node_index = graphicsQueueNodeIndex;
    demo_init_device(demo);
    vkGetDeviceQueue(demo->device, demo->graphics_queue_node_index, 0,
                     &demo->queue);
    // Get the list of VkFormat's that are supported:
    uint32_t formatCount;
    err = demo->fpGetPhysicalDeviceSurfaceFormatsKHR(demo->gpu, demo->surface,
                                                     &formatCount, NULL);
    assert(!err);
    VkSurfaceFormatKHR *surfFormats =
        (VkSurfaceFormatKHR *)malloc(formatCount * sizeof(VkSurfaceFormatKHR));
    err = demo->fpGetPhysicalDeviceSurfaceFormatsKHR(demo->gpu, demo->surface,
                                                     &formatCount, surfFormats);
    assert(!err);
    // If the format list includes just one entry of VK_FORMAT_UNDEFINED,
    // the surface has no preferred format.  Otherwise, at least one
    // supported format will be returned.
    if (formatCount == 1 && surfFormats[0].format == VK_FORMAT_UNDEFINED) {
        demo->format = VK_FORMAT_B8G8R8A8_UNORM;
    } else {
        assert(formatCount >= 1);
        demo->format = surfFormats[0].format;
    }
    demo->color_space = surfFormats[0].colorSpace;
    // Get Memory information and properties
    vkGetPhysicalDeviceMemoryProperties(demo->gpu, &demo->memory_properties);
    // NOTE(review): surfFormats is not freed here (leak in the original
    // upstream demo as well).
}
  1819. static void demo_init_connection(struct demo *demo) {
  1820. glfwSetErrorCallback(demo_error_callback);
  1821. if (!glfwInit()) {
  1822. printf("Cannot initialize GLFW.\nExiting ...\n");
  1823. fflush(stdout);
  1824. exit(1);
  1825. }
  1826. if (!glfwVulkanSupported()) {
  1827. printf("GLFW failed to find the Vulkan loader.\nExiting ...\n");
  1828. fflush(stdout);
  1829. exit(1);
  1830. }
  1831. }
  1832. static void demo_init(struct demo *demo, const int argc, const char *argv[])
  1833. {
  1834. int i;
  1835. memset(demo, 0, sizeof(*demo));
  1836. for (i = 0; i < argc; i++) {
  1837. if (strncmp(argv[i], "--use_staging", strlen("--use_staging")) == 0)
  1838. demo->use_staging_buffer = true;
  1839. }
  1840. demo_init_connection(demo);
  1841. demo_init_vk(demo);
  1842. demo->width = 300;
  1843. demo->height = 300;
  1844. demo->depthStencil = 1.0;
  1845. demo->depthIncrement = -0.01f;
  1846. }
/*
 * Tear down everything created by demo_prepare / demo_init_* in roughly
 * reverse creation order, then shut down GLFW.  Callers are expected to
 * have ensured the device is idle before invoking this.
 */
static void demo_cleanup(struct demo *demo) {
    uint32_t i;
    for (i = 0; i < demo->swapchainImageCount; i++) {
        vkDestroyFramebuffer(demo->device, demo->framebuffers[i], NULL);
    }
    free(demo->framebuffers);
    vkDestroyDescriptorPool(demo->device, demo->desc_pool, NULL);
    // setup_cmd is only allocated lazily, so it may be VK_NULL_HANDLE.
    if (demo->setup_cmd) {
        vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->setup_cmd);
    }
    vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->draw_cmd);
    vkDestroyCommandPool(demo->device, demo->cmd_pool, NULL);
    vkDestroyPipeline(demo->device, demo->pipeline, NULL);
    vkDestroyRenderPass(demo->device, demo->render_pass, NULL);
    vkDestroyPipelineLayout(demo->device, demo->pipeline_layout, NULL);
    vkDestroyDescriptorSetLayout(demo->device, demo->desc_layout, NULL);
    vkDestroyBuffer(demo->device, demo->vertices.buf, NULL);
    vkFreeMemory(demo->device, demo->vertices.mem, NULL);
    for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
        vkDestroyImageView(demo->device, demo->textures[i].view, NULL);
        vkDestroyImage(demo->device, demo->textures[i].image, NULL);
        vkFreeMemory(demo->device, demo->textures[i].mem, NULL);
        vkDestroySampler(demo->device, demo->textures[i].sampler, NULL);
    }
    // Only the views are destroyed here: the swapchain images themselves
    // are owned by the swapchain.
    for (i = 0; i < demo->swapchainImageCount; i++) {
        vkDestroyImageView(demo->device, demo->buffers[i].view, NULL);
    }
    vkDestroyImageView(demo->device, demo->depth.view, NULL);
    vkDestroyImage(demo->device, demo->depth.image, NULL);
    vkFreeMemory(demo->device, demo->depth.mem, NULL);
    demo->fpDestroySwapchainKHR(demo->device, demo->swapchain, NULL);
    free(demo->buffers);
    vkDestroyDevice(demo->device, NULL);
    vkDestroySurfaceKHR(demo->inst, demo->surface, NULL);
    // The instance was created with the custom allocator, so it must be
    // destroyed with the same one.
    vkDestroyInstance(demo->inst, &demo->allocator);
    free(demo->queue_props);
    glfwDestroyWindow(demo->window);
    glfwTerminate();
}
/*
 * Handle a window resize: destroy all size-dependent objects (same
 * sequence as demo_cleanup, minus the device/instance/surface and the
 * swapchain handle itself) and then re-run demo_prepare() to rebuild
 * them at the new size.
 *
 * NOTE(review): demo->swapchain is intentionally left alive here --
 * presumably demo_prepare_buffers passes it as oldSwapchain when
 * recreating; verify against that function (outside this chunk).
 */
static void demo_resize(struct demo *demo) {
    uint32_t i;
    for (i = 0; i < demo->swapchainImageCount; i++) {
        vkDestroyFramebuffer(demo->device, demo->framebuffers[i], NULL);
    }
    free(demo->framebuffers);
    vkDestroyDescriptorPool(demo->device, demo->desc_pool, NULL);
    // setup_cmd is allocated lazily and may be VK_NULL_HANDLE.
    if (demo->setup_cmd) {
        vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->setup_cmd);
    }
    vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->draw_cmd);
    vkDestroyCommandPool(demo->device, demo->cmd_pool, NULL);
    vkDestroyPipeline(demo->device, demo->pipeline, NULL);
    vkDestroyRenderPass(demo->device, demo->render_pass, NULL);
    vkDestroyPipelineLayout(demo->device, demo->pipeline_layout, NULL);
    vkDestroyDescriptorSetLayout(demo->device, demo->desc_layout, NULL);
    vkDestroyBuffer(demo->device, demo->vertices.buf, NULL);
    vkFreeMemory(demo->device, demo->vertices.mem, NULL);
    for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
        vkDestroyImageView(demo->device, demo->textures[i].view, NULL);
        vkDestroyImage(demo->device, demo->textures[i].image, NULL);
        vkFreeMemory(demo->device, demo->textures[i].mem, NULL);
        vkDestroySampler(demo->device, demo->textures[i].sampler, NULL);
    }
    for (i = 0; i < demo->swapchainImageCount; i++) {
        vkDestroyImageView(demo->device, demo->buffers[i].view, NULL);
    }
    vkDestroyImageView(demo->device, demo->depth.view, NULL);
    vkDestroyImage(demo->device, demo->depth.image, NULL);
    vkFreeMemory(demo->device, demo->depth.mem, NULL);
    free(demo->buffers);
    // Second, re-perform the demo_prepare() function, which will re-create the
    // swapchain:
    demo_prepare(demo);
}
/*
 * Entry point: initialize GLFW/Vulkan state, create the window and
 * swapchain, build all rendering resources, run the event/draw loop,
 * and tear everything down on exit.
 */
int main(const int argc, const char *argv[]) {
    struct demo demo;
    demo_init(&demo, argc, argv);
    demo_create_window(&demo);
    demo_init_vk_swapchain(&demo);
    demo_prepare(&demo);
    demo_run(&demo);
    demo_cleanup(&demo);
    return 0;
}