├── .gitignore
├── Include
│   └── Neko
│       └── Neko.h
├── LICENSE
├── README.md
├── Resources
│   └── neko.png
└── Samples
    ├── 01_Triangle
    │   ├── CMakeLists.txt
    │   ├── Triangle.c
    │   └── Triangle.hlsl
    ├── 02_Cubes
    │   ├── CMakeLists.txt
    │   └── main.c
    ├── CMakeLists.txt
    ├── Modules
    │   └── ShaderCompiler.cmake
    ├── Neko
    │   ├── CMakeLists.txt
    │   └── Source
    │       └── Neko.c
    ├── SampleBase
    │   ├── CMakeLists.txt
    │   ├── Include
    │   │   └── Neko
    │   │       └── Sample.h
    │   └── Source
    │       └── SampleBase.c
    └── ShaderConductor
        └── CMakeLists.txt
/.gitignore:
--------------------------------------------------------------------------------
1 | Build/*
--------------------------------------------------------------------------------
/Include/Neko/Neko.h:
--------------------------------------------------------------------------------
1 | #ifndef NEKO_H
2 | #define NEKO_H
3 |
4 | /*
5 | The latest version of this library is available on GitHub:
6 | https://github.com/nyalloc/neko
7 |
8 | Neko is a graphics API abstraction layer. It intends to sit on top of Vulkan, D3D12 and Metal and bring
9 | a higher-level Metal / WebGPU style interface. The intended users are developers who want to quickly
10 | prototype or work on small game engines. I intend to bring the most important capabilities of recent
11 | APIs, but restore some of the joy of working with higher-level APIs.
12 | https://twitter.com/aras_p/status/1313692668095012875
13 |
14 | Neko is largely inspired by Andre Weissflog's sokol-gfx, but intends to bring more of the current-gen graphics
15 | API features.
16 | https://twitter.com/FlohOfWoe/status/1328683854195003392
17 |
18 | Neko is under construction. It's gonna be nasty for a while, and the API will 100% change. Bear with me!
19 |
20 | TODOs:
21 |
22 | There are hacks here, and most of them are in the interest of getting things done while I'm still sane.
23 | I fully intend to go back through all these issues, iron them out and flesh out the oversimplifications.
24 | However, I am but one person and this is largely experimental. Set your expectations accordingly.
25 |
26 | * Handles
27 | Right now each neko object is an opaque pointer handle. These need to be individually allocated, which isn't
28 | great. The user can provide their own malloc through the NK_MALLOC macro, but most people won't, so I wonder
29 | if we can do something to improve the default performance here. We could replace the opaque pointers with integer
30 | ids, using sparse sets to maintain the list of live ids, so each external handle is just an index into an
31 | internal array. If we continue using opaque pointers, we can use a small object allocator.
32 |
33 | * Device selection
34 | To make things simple for myself, I'm avoiding exposing an API for in-depth device selection until I've got
35 | more interesting problems solved. Right now, the CreateDevice() function will simply ensure that the device
36 | being used is correct for the backend and is compatible with a given surface. Maybe I'll borrow from SYCL and
37 | let users supply a device selector function. I think most people find the Adapter /
38 | PhysicalDevice API style kind of confusing to begin with.
39 |
40 | * User-specified swapchain format
41 | Right now the swapchain format is chosen by the backend for simplicity. The user might want to choose it themselves.
42 |
43 | * Custom allocator support
44 | Add allocator structs to every create function that can allocate. Just use malloc / free until everything is working.
45 | Only after that will I refactor to make functions allocator-friendly.
46 |
47 | * Recoverable errors
48 | Right now, errors are not reported back to the user in an actionable way. They're driven through assertions.
49 | Introduce "Try" variants of functions that can fail. They will return error codes and return their values via
50 | arguments. E.g. NkInstance nkCreateInstance() -> NkResult nkTryCreateInstance(NkInstance* instance). To prevent
51 | code duplication, the old versions of the functions can invoke the Try* version and assert that the result is
52 | a success.
53 |
54 | * Configurable present modes (FIFO, MAILBOX...)
55 |
56 | * Different backends
57 | Focusing on Vulkan first. D3D12, Metal and WebGPU as pipedream goals. Not interested in OpenGL support; mapping
58 | OpenGL to this API sounds like a lot of work with little to no benefit.
59 |
60 | * Runtime backend selection
61 | Not sure if this is practically useful yet, but it could be interesting to let multiple backends coexist at
62 | runtime, or to let the user decide which backend to use at runtime. Not worth investing effort in until there is
63 | a lot more done.
64 |
65 | * C++ wrapper
66 | C is good for libraries, but I think the majority of people will be using C libraries from a C++ application.
67 | I'd like to introduce a very thin header-only C++ API wrapper for this library, like Vulkan-Hpp or mtlpp.
68 | */
69 |
70 | #if defined(_WIN32)
71 | #   define NK_ALIGN_OF(x) __alignof(x)
72 | #   define NK_ALIGN_AS(x) __declspec(align(x))
73 | #   if defined(NK_SHARED_LIBRARY)
74 | #       if defined(NK_IMPLEMENTATION)
75 | #           define NK_EXPORT __declspec(dllexport)
76 | #       else
77 | #           define NK_EXPORT __declspec(dllimport)
78 | #       endif
79 | #   else
80 | #       define NK_EXPORT
81 | #   endif
82 | typedef struct HINSTANCE__* HINSTANCE;
83 | typedef struct HWND__* HWND;
84 | #else
85 | #   define NK_ALIGN_OF(x) __alignof__(x)
86 | #   define NK_ALIGN_AS(x) __attribute__ ((__aligned__(x)))
87 | #   if defined(NK_SHARED_LIBRARY)
88 | #       if defined(NK_IMPLEMENTATION)
89 | #           define NK_EXPORT __attribute__((visibility("default")))
90 | #       else
91 | #           define NK_EXPORT
92 | #       endif
93 | #   else
94 | #       define NK_EXPORT
95 | #   endif
96 | #endif
97 |
98 | #include <stdint.h>
99 |
100 | typedef uint32_t NkFlags;
101 |
102 | typedef struct NkBindGroupImpl* NkBindGroup;
103 | typedef struct NkBindGroupLayoutImpl* NkBindGroupLayout;
104 | typedef struct NkBufferImpl* NkBuffer;
105 | typedef struct NkCommandBufferImpl* NkCommandBuffer;
106 | typedef struct NkCommandEncoderImpl* NkCommandEncoder;
107 | typedef struct NkComputePassEncoderImpl* NkComputePassEncoder;
108 | typedef struct NkComputePipelineImpl* NkComputePipeline;
109 | typedef struct NkDeviceImpl* NkDevice;
110 | typedef struct NkFenceImpl* NkFence;
111 | typedef struct NkInstanceImpl* NkInstance;
112 | typedef struct NkPipelineLayoutImpl* NkPipelineLayout;
113 | typedef struct NkQuerySetImpl* NkQuerySet;
114 | typedef struct NkQueueImpl* NkQueue;
115 | typedef struct NkRenderBundleImpl* NkRenderBundle;
116 | typedef struct NkRenderBundleEncoderImpl* NkRenderBundleEncoder;
117 | typedef struct NkRenderPassEncoderImpl* NkRenderPassEncoder;
118 | typedef struct NkRenderPipelineImpl* NkRenderPipeline;
119 | typedef struct NkSamplerImpl* NkSampler;
120 | typedef struct NkShaderModuleImpl* NkShaderModule;
121 | typedef struct NkSurfaceImpl* NkSurface;
122 | typedef struct NkSwapChainImpl* NkSwapChain;
123 | typedef struct NkTextureImpl* NkTexture;
124 | typedef struct NkTextureViewImpl* NkTextureView;
125 |
126 | typedef enum NkBool {
127 |     NkFalse,
128 |     NkTrue
129 | } NkBool;
130 |
131
| typedef enum NkDeviceType { 132 | NkDeviceType_DiscreteGPU = 0x00000000, 133 | NkDeviceType_IntegratedGPU = 0x00000001, 134 | NkDeviceType_CPU = 0x00000002, 135 | NkDeviceType_Unknown = 0x00000003, 136 | NkDeviceType_Force32 = 0x7FFFFFFF 137 | } NkDeviceType; 138 | 139 | typedef enum NkAddressMode { 140 | NkAddressMode_Repeat = 0x00000000, 141 | NkAddressMode_MirrorRepeat = 0x00000001, 142 | NkAddressMode_ClampToEdge = 0x00000002, 143 | NkAddressMode_Force32 = 0x7FFFFFFF 144 | } NkAddressMode; 145 | 146 | typedef enum NkBindingType { 147 | NkBindingType_UniformBuffer = 0x00000000, 148 | NkBindingType_StorageBuffer = 0x00000001, 149 | NkBindingType_ReadonlyStorageBuffer = 0x00000002, 150 | NkBindingType_Sampler = 0x00000003, 151 | NkBindingType_ComparisonSampler = 0x00000004, 152 | NkBindingType_SampledTexture = 0x00000005, 153 | NkBindingType_MultisampledTexture = 0x00000006, 154 | NkBindingType_ReadonlyStorageTexture = 0x00000007, 155 | NkBindingType_WriteonlyStorageTexture = 0x00000008, 156 | NkBindingType_Force32 = 0x7FFFFFFF 157 | } NkBindingType; 158 | 159 | typedef enum NkBlendFactor { 160 | NkBlendFactor_Zero = 0x00000000, 161 | NkBlendFactor_One = 0x00000001, 162 | NkBlendFactor_SrcColor = 0x00000002, 163 | NkBlendFactor_OneMinusSrcColor = 0x00000003, 164 | NkBlendFactor_SrcAlpha = 0x00000004, 165 | NkBlendFactor_OneMinusSrcAlpha = 0x00000005, 166 | NkBlendFactor_DstColor = 0x00000006, 167 | NkBlendFactor_OneMinusDstColor = 0x00000007, 168 | NkBlendFactor_DstAlpha = 0x00000008, 169 | NkBlendFactor_OneMinusDstAlpha = 0x00000009, 170 | NkBlendFactor_SrcAlphaSaturated = 0x0000000A, 171 | NkBlendFactor_BlendColor = 0x0000000B, 172 | NkBlendFactor_OneMinusBlendColor = 0x0000000C, 173 | NkBlendFactor_Force32 = 0x7FFFFFFF 174 | } NkBlendFactor; 175 | 176 | typedef enum NkBlendOperation { 177 | NkBlendOperation_Add = 0x00000000, 178 | NkBlendOperation_Subtract = 0x00000001, 179 | NkBlendOperation_ReverseSubtract = 0x00000002, 180 | NkBlendOperation_Min = 0x00000003, 181 | NkBlendOperation_Max = 0x00000004, 182 | NkBlendOperation_Force32 = 0x7FFFFFFF 183 | } NkBlendOperation; 184 | 185 | typedef enum NkBufferMapAsyncStatus { 186 | NkBufferMapAsyncStatus_Success = 0x00000000, 187 | NkBufferMapAsyncStatus_Error = 0x00000001, 188 | NkBufferMapAsyncStatus_Unknown = 0x00000002, 189 | NkBufferMapAsyncStatus_DeviceLost = 0x00000003, 190 | NkBufferMapAsyncStatus_DestroyedBeforeCallback = 0x00000004, 191 | NkBufferMapAsyncStatus_UnmappedBeforeCallback = 0x00000005, 192 | NkBufferMapAsyncStatus_Force32 = 0x7FFFFFFF 193 | } NkBufferMapAsyncStatus; 194 | 195 | typedef enum NkCompareFunction { 196 | NkCompareFunction_Undefined = 0x00000000, 197 | NkCompareFunction_Never = 0x00000001, 198 | NkCompareFunction_Less = 0x00000002, 199 | NkCompareFunction_LessEqual = 0x00000003, 200 | NkCompareFunction_Greater = 0x00000004, 201 | NkCompareFunction_GreaterEqual = 0x00000005, 202 | NkCompareFunction_Equal = 0x00000006, 203 | NkCompareFunction_NotEqual = 0x00000007, 204 | NkCompareFunction_Always = 0x00000008, 205 | NkCompareFunction_Force32 = 0x7FFFFFFF 206 | } NkCompareFunction; 207 | 208 | typedef enum NkCreateReadyPipelineStatus { 209 | NkCreateReadyPipelineStatus_Success = 0x00000000, 210 | NkCreateReadyPipelineStatus_Error = 0x00000001, 211 | NkCreateReadyPipelineStatus_DeviceLost = 0x00000002, 212 | NkCreateReadyPipelineStatus_DeviceDestroyed = 0x00000003, 213 | NkCreateReadyPipelineStatus_Unknown = 0x00000004, 214 | NkCreateReadyPipelineStatus_Force32 = 0x7FFFFFFF 215 | } NkCreateReadyPipelineStatus; 
216 | 217 | typedef enum NkCullMode { 218 | NkCullMode_None = 0x00000000, 219 | NkCullMode_Front = 0x00000001, 220 | NkCullMode_Back = 0x00000002, 221 | NkCullMode_Force32 = 0x7FFFFFFF 222 | } NkCullMode; 223 | 224 | typedef enum NkErrorFilter { 225 | NkErrorFilter_None = 0x00000000, 226 | NkErrorFilter_Validation = 0x00000001, 227 | NkErrorFilter_OutOfMemory = 0x00000002, 228 | NkErrorFilter_Force32 = 0x7FFFFFFF 229 | } NkErrorFilter; 230 | 231 | typedef enum NkErrorType { 232 | NkErrorType_NoError = 0x00000000, 233 | NkErrorType_Validation = 0x00000001, 234 | NkErrorType_OutOfMemory = 0x00000002, 235 | NkErrorType_Unknown = 0x00000003, 236 | NkErrorType_DeviceLost = 0x00000004, 237 | NkErrorType_Force32 = 0x7FFFFFFF 238 | } NkErrorType; 239 | 240 | typedef enum NkFenceCompletionStatus { 241 | NkFenceCompletionStatus_Success = 0x00000000, 242 | NkFenceCompletionStatus_Error = 0x00000001, 243 | NkFenceCompletionStatus_Unknown = 0x00000002, 244 | NkFenceCompletionStatus_DeviceLost = 0x00000003, 245 | NkFenceCompletionStatus_Force32 = 0x7FFFFFFF 246 | } NkFenceCompletionStatus; 247 | 248 | typedef enum NkFilterMode { 249 | NkFilterMode_Nearest = 0x00000000, 250 | NkFilterMode_Linear = 0x00000001, 251 | NkFilterMode_Force32 = 0x7FFFFFFF 252 | } NkFilterMode; 253 | 254 | typedef enum NkFrontFace { 255 | NkFrontFace_CCW = 0x00000000, 256 | NkFrontFace_CW = 0x00000001, 257 | NkFrontFace_Force32 = 0x7FFFFFFF 258 | } NkFrontFace; 259 | 260 | typedef enum NkIndexFormat { 261 | NkIndexFormat_Undefined = 0x00000000, 262 | NkIndexFormat_Uint16 = 0x00000001, 263 | NkIndexFormat_Uint32 = 0x00000002, 264 | NkIndexFormat_Force32 = 0x7FFFFFFF 265 | } NkIndexFormat; 266 | 267 | typedef enum NkInputStepMode { 268 | NkInputStepMode_Vertex = 0x00000000, 269 | NkInputStepMode_Instance = 0x00000001, 270 | NkInputStepMode_Force32 = 0x7FFFFFFF 271 | } NkInputStepMode; 272 | 273 | typedef enum NkLoadOp { 274 | NkLoadOp_Clear = 0x00000000, 275 | NkLoadOp_Load = 0x00000001, 276 | NkLoadOp_Force32 = 0x7FFFFFFF 277 | } NkLoadOp; 278 | 279 | typedef enum NkPipelineStatisticName { 280 | NkPipelineStatisticName_VertexShaderInvocations = 0x00000000, 281 | NkPipelineStatisticName_ClipperInvocations = 0x00000001, 282 | NkPipelineStatisticName_ClipperPrimitivesOut = 0x00000002, 283 | NkPipelineStatisticName_FragmentShaderInvocations = 0x00000003, 284 | NkPipelineStatisticName_ComputeShaderInvocations = 0x00000004, 285 | NkPipelineStatisticName_Force32 = 0x7FFFFFFF 286 | } NkPipelineStatisticName; 287 | 288 | typedef enum NkPrimitiveTopology { 289 | NkPrimitiveTopology_PointList = 0x00000000, 290 | NkPrimitiveTopology_LineList = 0x00000001, 291 | NkPrimitiveTopology_LineStrip = 0x00000002, 292 | NkPrimitiveTopology_TriangleList = 0x00000003, 293 | NkPrimitiveTopology_TriangleStrip = 0x00000004, 294 | NkPrimitiveTopology_Force32 = 0x7FFFFFFF 295 | } NkPrimitiveTopology; 296 | 297 | typedef enum NkQueryType { 298 | NkQueryType_Occlusion = 0x00000000, 299 | NkQueryType_PipelineStatistics = 0x00000001, 300 | NkQueryType_Timestamp = 0x00000002, 301 | NkQueryType_Force32 = 0x7FFFFFFF 302 | } NkQueryType; 303 | 304 | typedef enum NkStencilOperation { 305 | NkStencilOperation_Keep = 0x00000000, 306 | NkStencilOperation_Zero = 0x00000001, 307 | NkStencilOperation_Replace = 0x00000002, 308 | NkStencilOperation_Invert = 0x00000003, 309 | NkStencilOperation_IncrementClamp = 0x00000004, 310 | NkStencilOperation_DecrementClamp = 0x00000005, 311 | NkStencilOperation_IncrementWrap = 0x00000006, 312 | NkStencilOperation_DecrementWrap = 
0x00000007, 313 | NkStencilOperation_Force32 = 0x7FFFFFFF 314 | } NkStencilOperation; 315 | 316 | typedef enum NkStoreOp { 317 | NkStoreOp_Store = 0x00000000, 318 | NkStoreOp_Clear = 0x00000001, 319 | NkStoreOp_Force32 = 0x7FFFFFFF 320 | } NkStoreOp; 321 | 322 | typedef enum NkTextureAspect { 323 | NkTextureAspect_All = 0x00000000, 324 | NkTextureAspect_StencilOnly = 0x00000001, 325 | NkTextureAspect_DepthOnly = 0x00000002, 326 | NkTextureAspect_Force32 = 0x7FFFFFFF 327 | } NkTextureAspect; 328 | 329 | typedef enum NkTextureComponentType { 330 | NkTextureComponentType_Float = 0x00000000, 331 | NkTextureComponentType_Sint = 0x00000001, 332 | NkTextureComponentType_Uint = 0x00000002, 333 | NkTextureComponentType_DepthComparison = 0x00000003, 334 | NkTextureComponentType_Force32 = 0x7FFFFFFF 335 | } NkTextureComponentType; 336 | 337 | typedef enum NkTextureDimension { 338 | NkTextureDimension_1D = 0x00000000, 339 | NkTextureDimension_2D = 0x00000001, 340 | NkTextureDimension_3D = 0x00000002, 341 | NkTextureDimension_Force32 = 0x7FFFFFFF 342 | } NkTextureDimension; 343 | 344 | typedef enum NkTextureFormat { 345 | NkTextureFormat_Undefined = 0x00000000, 346 | NkTextureFormat_R8Unorm = 0x00000001, 347 | NkTextureFormat_R8Snorm = 0x00000002, 348 | NkTextureFormat_R8Uint = 0x00000003, 349 | NkTextureFormat_R8Sint = 0x00000004, 350 | NkTextureFormat_R16Uint = 0x00000005, 351 | NkTextureFormat_R16Sint = 0x00000006, 352 | NkTextureFormat_R16Float = 0x00000007, 353 | NkTextureFormat_RG8Unorm = 0x00000008, 354 | NkTextureFormat_RG8Snorm = 0x00000009, 355 | NkTextureFormat_RG8Uint = 0x0000000A, 356 | NkTextureFormat_RG8Sint = 0x0000000B, 357 | NkTextureFormat_R32Float = 0x0000000C, 358 | NkTextureFormat_R32Uint = 0x0000000D, 359 | NkTextureFormat_R32Sint = 0x0000000E, 360 | NkTextureFormat_RG16Uint = 0x0000000F, 361 | NkTextureFormat_RG16Sint = 0x00000010, 362 | NkTextureFormat_RG16Float = 0x00000011, 363 | NkTextureFormat_RGBA8Unorm = 0x00000012, 364 | NkTextureFormat_RGBA8UnormSrgb = 0x00000013, 365 | NkTextureFormat_RGBA8Snorm = 0x00000014, 366 | NkTextureFormat_RGBA8Uint = 0x00000015, 367 | NkTextureFormat_RGBA8Sint = 0x00000016, 368 | NkTextureFormat_BGRA8Unorm = 0x00000017, 369 | NkTextureFormat_BGRA8UnormSrgb = 0x00000018, 370 | NkTextureFormat_RGB10A2Unorm = 0x00000019, 371 | NkTextureFormat_RG11B10Ufloat = 0x0000001A, 372 | NkTextureFormat_RGB9E5Ufloat = 0x0000001B, 373 | NkTextureFormat_RG32Float = 0x0000001C, 374 | NkTextureFormat_RG32Uint = 0x0000001D, 375 | NkTextureFormat_RG32Sint = 0x0000001E, 376 | NkTextureFormat_RGBA16Uint = 0x0000001F, 377 | NkTextureFormat_RGBA16Sint = 0x00000020, 378 | NkTextureFormat_RGBA16Float = 0x00000021, 379 | NkTextureFormat_RGBA32Float = 0x00000022, 380 | NkTextureFormat_RGBA32Uint = 0x00000023, 381 | NkTextureFormat_RGBA32Sint = 0x00000024, 382 | NkTextureFormat_Depth32Float = 0x00000025, 383 | NkTextureFormat_Depth24Plus = 0x00000026, 384 | NkTextureFormat_Depth24PlusStencil8 = 0x00000027, 385 | NkTextureFormat_Stencil8 = 0x00000028, 386 | NkTextureFormat_BC1RGBAUnorm = 0x00000029, 387 | NkTextureFormat_BC1RGBAUnormSrgb = 0x0000002A, 388 | NkTextureFormat_BC2RGBAUnorm = 0x0000002B, 389 | NkTextureFormat_BC2RGBAUnormSrgb = 0x0000002C, 390 | NkTextureFormat_BC3RGBAUnorm = 0x0000002D, 391 | NkTextureFormat_BC3RGBAUnormSrgb = 0x0000002E, 392 | NkTextureFormat_BC4RUnorm = 0x0000002F, 393 | NkTextureFormat_BC4RSnorm = 0x00000030, 394 | NkTextureFormat_BC5RGUnorm = 0x00000031, 395 | NkTextureFormat_BC5RGSnorm = 0x00000032, 396 | NkTextureFormat_BC6HRGBUfloat = 
0x00000033, 397 | NkTextureFormat_BC6HRGBFloat = 0x00000034, 398 | NkTextureFormat_BC7RGBAUnorm = 0x00000035, 399 | NkTextureFormat_BC7RGBAUnormSrgb = 0x00000036, 400 | NkTextureFormat_Force32 = 0x7FFFFFFF 401 | } NkTextureFormat; 402 | 403 | typedef enum NkTextureViewDimension { 404 | NkTextureViewDimension_Undefined = 0x00000000, 405 | NkTextureViewDimension_1D = 0x00000001, 406 | NkTextureViewDimension_2D = 0x00000002, 407 | NkTextureViewDimension_2DArray = 0x00000003, 408 | NkTextureViewDimension_Cube = 0x00000004, 409 | NkTextureViewDimension_CubeArray = 0x00000005, 410 | NkTextureViewDimension_3D = 0x00000006, 411 | NkTextureViewDimension_Force32 = 0x7FFFFFFF 412 | } NkTextureViewDimension; 413 | 414 | typedef enum NkVertexFormat { 415 | NkVertexFormat_UChar2 = 0x00000000, 416 | NkVertexFormat_UChar4 = 0x00000001, 417 | NkVertexFormat_Char2 = 0x00000002, 418 | NkVertexFormat_Char4 = 0x00000003, 419 | NkVertexFormat_UChar2Norm = 0x00000004, 420 | NkVertexFormat_UChar4Norm = 0x00000005, 421 | NkVertexFormat_Char2Norm = 0x00000006, 422 | NkVertexFormat_Char4Norm = 0x00000007, 423 | NkVertexFormat_UShort2 = 0x00000008, 424 | NkVertexFormat_UShort4 = 0x00000009, 425 | NkVertexFormat_Short2 = 0x0000000A, 426 | NkVertexFormat_Short4 = 0x0000000B, 427 | NkVertexFormat_UShort2Norm = 0x0000000C, 428 | NkVertexFormat_UShort4Norm = 0x0000000D, 429 | NkVertexFormat_Short2Norm = 0x0000000E, 430 | NkVertexFormat_Short4Norm = 0x0000000F, 431 | NkVertexFormat_Half2 = 0x00000010, 432 | NkVertexFormat_Half4 = 0x00000011, 433 | NkVertexFormat_Float = 0x00000012, 434 | NkVertexFormat_Float2 = 0x00000013, 435 | NkVertexFormat_Float3 = 0x00000014, 436 | NkVertexFormat_Float4 = 0x00000015, 437 | NkVertexFormat_UInt = 0x00000016, 438 | NkVertexFormat_UInt2 = 0x00000017, 439 | NkVertexFormat_UInt3 = 0x00000018, 440 | NkVertexFormat_UInt4 = 0x00000019, 441 | NkVertexFormat_Int = 0x0000001A, 442 | NkVertexFormat_Int2 = 0x0000001B, 443 | NkVertexFormat_Int3 = 0x0000001C, 444 | NkVertexFormat_Int4 = 0x0000001D, 445 | NkVertexFormat_Force32 = 0x7FFFFFFF 446 | } NkVertexFormat; 447 | 448 | typedef enum NkBufferUsage { 449 | NkBufferUsage_None = 0x00000000, 450 | NkBufferUsage_MapRead = 0x00000001, 451 | NkBufferUsage_MapWrite = 0x00000002, 452 | NkBufferUsage_CopySrc = 0x00000004, 453 | NkBufferUsage_CopyDst = 0x00000008, 454 | NkBufferUsage_Index = 0x00000010, 455 | NkBufferUsage_Vertex = 0x00000020, 456 | NkBufferUsage_Uniform = 0x00000040, 457 | NkBufferUsage_Storage = 0x00000080, 458 | NkBufferUsage_Indirect = 0x00000100, 459 | NkBufferUsage_QueryResolve = 0x00000200, 460 | NkBufferUsage_Force32 = 0x7FFFFFFF 461 | } NkBufferUsage; 462 | typedef NkFlags NkBufferUsageFlags; 463 | 464 | typedef enum NkColorWriteMask { 465 | NkColorWriteMask_None = 0x00000000, 466 | NkColorWriteMask_Red = 0x00000001, 467 | NkColorWriteMask_Green = 0x00000002, 468 | NkColorWriteMask_Blue = 0x00000004, 469 | NkColorWriteMask_Alpha = 0x00000008, 470 | NkColorWriteMask_All = 0x0000000F, 471 | NkColorWriteMask_Force32 = 0x7FFFFFFF 472 | } NkColorWriteMask; 473 | typedef NkFlags NkColorWriteMaskFlags; 474 | 475 | typedef enum NkMapMode { 476 | NkMapMode_Read = 0x00000001, 477 | NkMapMode_Write = 0x00000002, 478 | NkMapMode_Force32 = 0x7FFFFFFF 479 | } NkMapMode; 480 | typedef NkFlags NkMapModeFlags; 481 | 482 | typedef enum NkShaderStage { 483 | NkShaderStage_None = 0x00000000, 484 | NkShaderStage_Vertex = 0x00000001, 485 | NkShaderStage_Fragment = 0x00000002, 486 | NkShaderStage_Compute = 0x00000004, 487 | NkShaderStage_Force32 = 
0x7FFFFFFF 488 | } NkShaderStage; 489 | typedef NkFlags NkShaderStageFlags; 490 | 491 | typedef enum NkTextureUsage { 492 | NkTextureUsage_None = 0x00000000, 493 | NkTextureUsage_CopySrc = 0x00000001, 494 | NkTextureUsage_CopyDst = 0x00000002, 495 | NkTextureUsage_Sampled = 0x00000004, 496 | NkTextureUsage_Storage = 0x00000008, 497 | NkTextureUsage_RenderAttachment = 0x00000010, 498 | NkTextureUsage_Force32 = 0x7FFFFFFF 499 | } NkTextureUsage; 500 | typedef NkFlags NkTextureUsageFlags; 501 | 502 | typedef struct NkBindGroupEntry { 503 | uint32_t binding; 504 | NkBuffer buffer; 505 | uint64_t offset; 506 | uint64_t size; 507 | NkSampler sampler; 508 | NkTextureView textureView; 509 | } NkBindGroupEntry; 510 | 511 | typedef struct NkBindGroupLayoutEntry { 512 | uint32_t binding; 513 | NkShaderStageFlags visibility; 514 | NkBindingType type; 515 | NkBool hasDynamicOffset; 516 | uint64_t minBufferBindingSize; 517 | NkBool multisampled; 518 | NkTextureViewDimension viewDimension; 519 | NkTextureComponentType textureComponentType; 520 | NkTextureFormat storageTextureFormat; 521 | } NkBindGroupLayoutEntry; 522 | 523 | typedef struct NkBlendInfo { 524 | NkBlendOperation operation; 525 | NkBlendFactor srcFactor; 526 | NkBlendFactor dstFactor; 527 | } NkBlendInfo; 528 | 529 | typedef struct NkBufferInfo { 530 | NkBufferUsageFlags usage; 531 | uint64_t size; 532 | NkBool mappedAtCreation; 533 | } NkBufferInfo; 534 | 535 | typedef struct NkFloat3 { 536 | float x; 537 | float y; 538 | float z; 539 | } NkFloat3; 540 | 541 | typedef struct NkFloat4 { 542 | float x; 543 | float y; 544 | float z; 545 | float w; 546 | } NkFloat4; 547 | 548 | typedef struct NkColor { 549 | float r; 550 | float g; 551 | float b; 552 | float a; 553 | } NkColor; 554 | 555 | typedef struct NkExtent3D { 556 | uint32_t width; 557 | uint32_t height; 558 | uint32_t depth; 559 | } NkExtent3D; 560 | 561 | typedef struct NkFenceInfo { 562 | uint64_t initialValue; 563 | } NkFenceInfo; 564 | 565 | typedef struct NkOrigin3D { 566 | uint32_t x; 567 | uint32_t y; 568 | uint32_t z; 569 | } NkOrigin3D; 570 | 571 | typedef struct NkPipelineLayoutInfo { 572 | uint32_t bindGroupLayoutCount; 573 | const NkBindGroupLayout* bindGroupLayouts; 574 | } NkPipelineLayoutInfo; 575 | 576 | typedef struct NkProgrammableStageInfo { 577 | NkShaderModule module; 578 | const char* entryPoint; 579 | } NkProgrammableStageInfo; 580 | 581 | typedef struct NkQuerySetInfo { 582 | NkQueryType type; 583 | uint32_t count; 584 | const NkPipelineStatisticName* pipelineStatistics; 585 | uint32_t pipelineStatisticsCount; 586 | } NkQuerySetInfo; 587 | 588 | typedef struct NkRasterizationStateInfo { 589 | NkFrontFace frontFace; 590 | NkCullMode cullMode; 591 | int32_t depthBias; 592 | float depthBiasSlopeScale; 593 | float depthBiasClamp; 594 | NkBool clampDepth; 595 | } NkRasterizationStateInfo; 596 | 597 | typedef struct NkRenderBundleEncoderInfo { 598 | uint32_t colorFormatsCount; 599 | const NkTextureFormat* colorFormats; 600 | NkTextureFormat depthStencilFormat; 601 | uint32_t sampleCount; 602 | } NkRenderBundleEncoderInfo; 603 | 604 | typedef struct NkRenderPassDepthStencilAttachmentInfo { 605 | NkTextureView attachment; 606 | NkLoadOp depthLoadOp; 607 | NkStoreOp depthStoreOp; 608 | float clearDepth; 609 | NkBool depthReadOnly; 610 | NkLoadOp stencilLoadOp; 611 | NkStoreOp stencilStoreOp; 612 | uint32_t clearStencil; 613 | NkBool stencilReadOnly; 614 | } NkRenderPassDepthStencilAttachmentInfo; 615 | 616 | typedef struct NkSamplerInfo { 617 | NkAddressMode 
addressModeU; 618 | NkAddressMode addressModeV; 619 | NkAddressMode addressModeW; 620 | NkFilterMode magFilter; 621 | NkFilterMode minFilter; 622 | NkFilterMode mipmapFilter; 623 | float lodMinClamp; 624 | float lodMaxClamp; 625 | NkCompareFunction compare; 626 | uint32_t maxAnisotropy; 627 | } NkSamplerInfo; 628 | 629 | typedef struct NkShaderModuleInfo { 630 | uint32_t size; // size of the shader source in bytes 631 | const void* source; // pointer to the shader source 632 | } NkShaderModuleInfo; 633 | 634 | typedef struct NkStencilStateFaceInfo { 635 | NkCompareFunction compare; 636 | NkStencilOperation failOp; 637 | NkStencilOperation depthFailOp; 638 | NkStencilOperation passOp; 639 | } NkStencilStateFaceInfo; 640 | 641 | typedef struct NkNativeSurface { 642 | #if defined(_WIN32) 643 | HINSTANCE hinstance; 644 | HWND hwnd; 645 | #endif 646 | } NkNativeSurface; 647 | 648 | typedef struct NkSurfaceInfo { 649 | NkNativeSurface native; 650 | } NkSurfaceInfo; 651 | 652 | typedef struct NkSwapChainInfo { 653 | uint32_t width; 654 | uint32_t height; 655 | } NkSwapChainInfo; 656 | 657 | typedef struct NkTextureDataLayout { 658 | uint64_t offset; 659 | uint32_t bytesPerRow; 660 | uint32_t rowsPerImage; 661 | } NkTextureDataLayout; 662 | 663 | typedef struct NkTextureViewInfo { 664 | NkTextureFormat format; 665 | NkTextureViewDimension dimension; 666 | uint32_t baseMipLevel; 667 | uint32_t mipLevelCount; 668 | uint32_t baseArrayLayer; 669 | uint32_t arrayLayerCount; 670 | NkTextureAspect aspect; 671 | } NkTextureViewInfo; 672 | 673 | typedef struct NkVertexAttributeInfo { 674 | NkVertexFormat format; 675 | uint64_t offset; 676 | uint32_t shaderLocation; 677 | } NkVertexAttributeInfo; 678 | 679 | typedef struct NkBindGroupInfo { 680 | NkBindGroupLayout layout; 681 | uint32_t entryCount; 682 | const NkBindGroupEntry* entries; 683 | } NkBindGroupInfo; 684 | 685 | typedef struct NkBindGroupLayoutInfo { 686 | uint32_t entryCount; 687 | const NkBindGroupLayoutEntry* entries; 688 | } NkBindGroupLayoutInfo; 689 | 690 | typedef struct NkBufferCopyView { 691 | NkTextureDataLayout layout; 692 | NkBuffer buffer; 693 | } NkBufferCopyView; 694 | 695 | typedef struct NkColorStateInfo { 696 | NkTextureFormat format; 697 | NkBlendInfo alphaBlend; 698 | NkBlendInfo colorBlend; 699 | NkColorWriteMaskFlags writeMask; 700 | } NkColorStateInfo; 701 | 702 | typedef struct NkComputePipelineInfo { 703 | NkPipelineLayout layout; 704 | NkProgrammableStageInfo computeStage; 705 | } NkComputePipelineInfo; 706 | 707 | typedef struct NkDepthStencilStateInfo { 708 | NkTextureFormat format; 709 | NkBool depthWriteEnabled; 710 | NkCompareFunction depthCompare; 711 | NkStencilStateFaceInfo stencilFront; 712 | NkStencilStateFaceInfo stencilBack; 713 | uint32_t stencilReadMask; 714 | uint32_t stencilWriteMask; 715 | } NkDepthStencilStateInfo; 716 | 717 | typedef struct NkRenderPassColorAttachmentInfo { 718 | NkTextureView attachment; 719 | NkTextureView resolveTarget; 720 | NkLoadOp loadOp; 721 | NkStoreOp storeOp; 722 | NkColor clearColor; 723 | } NkRenderPassColorAttachmentInfo; 724 | 725 | typedef struct NkTextureCopyView { 726 | NkTexture texture; 727 | uint32_t mipLevel; 728 | NkOrigin3D origin; 729 | } NkTextureCopyView; 730 | 731 | typedef struct NkTextureInfo { 732 | NkTextureUsageFlags usage; 733 | NkTextureDimension dimension; 734 | NkExtent3D size; 735 | NkTextureFormat format; 736 | uint32_t mipLevelCount; 737 | uint32_t sampleCount; 738 | } NkTextureInfo; 739 | 740 | typedef struct NkVertexBufferLayoutInfo { 741 | 
uint64_t arrayStride; 742 | NkInputStepMode stepMode; 743 | uint32_t attributeCount; 744 | const NkVertexAttributeInfo* attributes; 745 | } NkVertexBufferLayoutInfo; 746 | 747 | typedef struct NkRenderPassInfo { 748 | uint32_t colorAttachmentCount; 749 | const NkRenderPassColorAttachmentInfo* colorAttachments; 750 | const NkRenderPassDepthStencilAttachmentInfo* depthStencilAttachment; 751 | NkQuerySet occlusionQuerySet; 752 | } NkRenderPassInfo; 753 | 754 | typedef struct NkVertexStateInfo { 755 | uint32_t vertexBufferCount; 756 | const NkVertexBufferLayoutInfo* vertexBuffers; 757 | } NkVertexStateInfo; 758 | 759 | typedef struct NkRenderPipelineInfo { 760 | NkPipelineLayout layout; 761 | NkProgrammableStageInfo vertexStage; 762 | NkProgrammableStageInfo fragmentStage; 763 | const NkVertexStateInfo* vertexState; 764 | NkPrimitiveTopology primitiveTopology; 765 | const NkRasterizationStateInfo* rasterizationState; 766 | uint32_t sampleCount; 767 | const NkDepthStencilStateInfo* depthStencilState; 768 | uint32_t colorStateCount; 769 | const NkColorStateInfo* colorStates; 770 | uint32_t sampleMask; 771 | NkBool alphaToCoverageEnabled; 772 | } NkRenderPipelineInfo; 773 | 774 | #ifdef __cplusplus 775 | extern "C" { 776 | #endif 777 | 778 | typedef void (*NkDeviceLostCallback)(const char* message, void* userdata); 779 | typedef void (*NkErrorCallback)(NkErrorType type, const char* message, void* userdata); 780 | typedef void (*NkFenceOnCompletionCallback)(NkFenceCompletionStatus status, void* userdata); 781 | 782 | NK_EXPORT NkInstance nkCreateInstance(); 783 | 784 | // Methods of Buffer 785 | NK_EXPORT void nkDestroyBuffer(NkBuffer buffer); 786 | NK_EXPORT const void* nkBufferGetConstMappedRange(NkBuffer buffer, size_t offset, size_t size); 787 | NK_EXPORT void* nkBufferGetMappedRange(NkBuffer buffer, size_t offset, size_t size); 788 | NK_EXPORT NkBufferMapAsyncStatus nkBufferMap(NkBuffer buffer, NkMapModeFlags mode, size_t offset, size_t size); 789 | NK_EXPORT void nkBufferUnmap(NkBuffer buffer); 790 | 791 | // Methods of CommandEncoder 792 | NK_EXPORT NkComputePassEncoder nkCommandEncoderBeginComputePass(NkCommandEncoder commandEncoder); 793 | NK_EXPORT NkRenderPassEncoder nkCommandEncoderBeginRenderPass(NkCommandEncoder commandEncoder, const NkRenderPassInfo* descriptor); 794 | NK_EXPORT void nkCommandEncoderCopyBufferToBuffer(NkCommandEncoder commandEncoder, NkBuffer source, uint64_t sourceOffset, NkBuffer destination, uint64_t destinationOffset, uint64_t size); 795 | NK_EXPORT void nkCommandEncoderCopyBufferToTexture(NkCommandEncoder commandEncoder, const NkBufferCopyView* source, const NkTextureCopyView* destination, const NkExtent3D* copySize); 796 | NK_EXPORT void nkCommandEncoderCopyTextureToBuffer(NkCommandEncoder commandEncoder, const NkTextureCopyView* source, const NkBufferCopyView* destination, const NkExtent3D* copySize); 797 | NK_EXPORT void nkCommandEncoderCopyTextureToTexture(NkCommandEncoder commandEncoder, const NkTextureCopyView* source, const NkTextureCopyView* destination, const NkExtent3D* copySize); 798 | NK_EXPORT NkCommandBuffer nkCommandEncoderFinish(NkCommandEncoder commandEncoder); 799 | NK_EXPORT void nkCommandEncoderInsertDebugMarker(NkCommandEncoder commandEncoder, const char* markerLabel); 800 | NK_EXPORT void nkCommandEncoderPopDebugGroup(NkCommandEncoder commandEncoder); 801 | NK_EXPORT void nkCommandEncoderPushDebugGroup(NkCommandEncoder commandEncoder, const char* groupLabel); 802 | NK_EXPORT void nkCommandEncoderResolveQuerySet(NkCommandEncoder 
commandEncoder, NkQuerySet querySet, uint32_t firstQuery, uint32_t queryCount, NkBuffer destination, uint64_t destinationOffset); 803 | NK_EXPORT void nkCommandEncoderWriteTimestamp(NkCommandEncoder commandEncoder, NkQuerySet querySet, uint32_t queryIndex); 804 | 805 | // Methods of ComputePassEncoder 806 | NK_EXPORT void nkComputePassEncoderBeginPipelineStatisticsQuery(NkComputePassEncoder computePassEncoder, NkQuerySet querySet, uint32_t queryIndex); 807 | NK_EXPORT void nkComputePassEncoderDispatch(NkComputePassEncoder computePassEncoder, uint32_t x, uint32_t y, uint32_t z); 808 | NK_EXPORT void nkComputePassEncoderDispatchIndirect(NkComputePassEncoder computePassEncoder, NkBuffer indirectBuffer, uint64_t indirectOffset); 809 | NK_EXPORT void nkComputePassEncoderEndPass(NkComputePassEncoder computePassEncoder); 810 | NK_EXPORT void nkComputePassEncoderEndPipelineStatisticsQuery(NkComputePassEncoder computePassEncoder); 811 | NK_EXPORT void nkComputePassEncoderInsertDebugMarker(NkComputePassEncoder computePassEncoder, const char* markerLabel); 812 | NK_EXPORT void nkComputePassEncoderPopDebugGroup(NkComputePassEncoder computePassEncoder); 813 | NK_EXPORT void nkComputePassEncoderPushDebugGroup(NkComputePassEncoder computePassEncoder, const char* groupLabel); 814 | NK_EXPORT void nkComputePassEncoderSetBindGroup(NkComputePassEncoder computePassEncoder, uint32_t groupIndex, NkBindGroup group, uint32_t dynamicOffsetCount, const uint32_t* dynamicOffsets); 815 | NK_EXPORT void nkComputePassEncoderSetPipeline(NkComputePassEncoder computePassEncoder, NkComputePipeline pipeline); 816 | NK_EXPORT void nkComputePassEncoderWriteTimestamp(NkComputePassEncoder computePassEncoder, NkQuerySet querySet, uint32_t queryIndex); 817 | 818 | // Methods of ComputePipeline 819 | NK_EXPORT NkBindGroupLayout nkComputePipelineGetBindGroupLayout(NkComputePipeline computePipeline, uint32_t groupIndex); 820 | 821 | // Methods of Device 822 | NK_EXPORT void nkDestroyDevice(NkDevice device); 823 | NK_EXPORT NkBindGroup nkCreateBindGroup(NkDevice device, const NkBindGroupInfo* descriptor); 824 | NK_EXPORT NkBindGroupLayout nkCreateBindGroupLayout(NkDevice device, const NkBindGroupLayoutInfo* descriptor); 825 | NK_EXPORT NkBuffer nkCreateBuffer(NkDevice device, const NkBufferInfo* descriptor); 826 | NK_EXPORT NkCommandEncoder nkCreateCommandEncoder(NkDevice device); 827 | NK_EXPORT NkComputePipeline nkCreateComputePipeline(NkDevice device, const NkComputePipelineInfo* descriptor); 828 | NK_EXPORT NkPipelineLayout nkCreatePipelineLayout(NkDevice device, const NkPipelineLayoutInfo* descriptor); 829 | NK_EXPORT NkQuerySet nkCreateQuerySet(NkDevice device, const NkQuerySetInfo* descriptor); 830 | NK_EXPORT NkRenderBundleEncoder nkCreateRenderBundleEncoder(NkDevice device, const NkRenderBundleEncoderInfo* descriptor); 831 | NK_EXPORT NkRenderPipeline nkCreateRenderPipeline(NkDevice device, const NkRenderPipelineInfo* descriptor); 832 | NK_EXPORT NkSampler nkCreateSampler(NkDevice device, const NkSamplerInfo* descriptor); 833 | NK_EXPORT NkSwapChain nkCreateSwapChain(NkDevice device, NkSurface surface, const NkSwapChainInfo* descriptor); 834 | NK_EXPORT NkTexture nkCreateTexture(NkDevice device, const NkTextureInfo* descriptor); 835 | NK_EXPORT NkQueue nkDeviceGetDefaultQueue(NkDevice device); 836 | NK_EXPORT NkBool nkDevicePopErrorScope(NkDevice device, NkErrorCallback callback, void* userdata); 837 | NK_EXPORT void nkDevicePushErrorScope(NkDevice device, NkErrorFilter filter); 838 | NK_EXPORT void 
nkDeviceSetDeviceLostCallback(NkDevice device, NkDeviceLostCallback callback, void* userdata); 839 | NK_EXPORT void nkDeviceSetUncapturedErrorCallback(NkDevice device, NkErrorCallback callback, void* userdata); 840 | 841 | NK_EXPORT NkShaderModule nkCreateShaderModule(NkDevice device, const NkShaderModuleInfo* descriptor); 842 | NK_EXPORT void nkDestroyShaderModule(NkShaderModule shaderModule); 843 | 844 | // Methods of Fence 845 | NK_EXPORT void nkDeviceFence(NkFence fence); 846 | NK_EXPORT uint64_t nkFenceGetCompletedValue(NkFence fence); 847 | NK_EXPORT void nkFenceOnCompletion(NkFence fence, uint64_t value, NkFenceOnCompletionCallback callback, void* userdata); 848 | 849 | // Methods of Instance 850 | NK_EXPORT void nkDestroyInstance(NkInstance instance); 851 | NK_EXPORT NkSurface nkCreateSurface(NkInstance instance, const NkSurfaceInfo* descriptor); 852 | NK_EXPORT NkDevice nkCreateDevice(NkInstance instance, NkSurface surface); 853 | 854 | // Methods of QuerySet 855 | NK_EXPORT void nkDestroyQuerySet(NkQuerySet querySet); 856 | 857 | // Methods of Queue 858 | NK_EXPORT void nkDestroyQueue(NkQueue queue); 859 | NK_EXPORT NkFence nkCreateFence(NkQueue queue, const NkFenceInfo* descriptor); 860 | NK_EXPORT void nkQueueSignal(NkQueue queue, NkFence fence, uint64_t signalValue); 861 | NK_EXPORT void nkQueueSubmit(NkQueue queue, uint32_t commandCount, const NkCommandBuffer* commands); 862 | NK_EXPORT void nkQueueWriteBuffer(NkQueue queue, NkBuffer buffer, uint64_t bufferOffset, const void* data, size_t size); 863 | NK_EXPORT void nkQueueWriteTexture(NkQueue queue, const NkTextureCopyView* destination, const void* data, size_t dataSize, const NkTextureDataLayout* dataLayout, const NkExtent3D* writeSize); 864 | 865 | // Methods of RenderBundleEncoder 866 | NK_EXPORT void nkRenderBundleEncoderDraw(NkRenderBundleEncoder renderBundleEncoder, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance); 867 | NK_EXPORT void nkRenderBundleEncoderDrawIndexed(NkRenderBundleEncoder renderBundleEncoder, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t baseVertex, uint32_t firstInstance); 868 | NK_EXPORT void nkRenderBundleEncoderDrawIndexedIndirect(NkRenderBundleEncoder renderBundleEncoder, NkBuffer indirectBuffer, uint64_t indirectOffset); 869 | NK_EXPORT void nkRenderBundleEncoderDrawIndirect(NkRenderBundleEncoder renderBundleEncoder, NkBuffer indirectBuffer, uint64_t indirectOffset); 870 | NK_EXPORT NkRenderBundle nkRenderBundleEncoderFinish(NkRenderBundleEncoder renderBundleEncoder); 871 | NK_EXPORT void nkRenderBundleEncoderInsertDebugMarker(NkRenderBundleEncoder renderBundleEncoder, const char* markerLabel); 872 | NK_EXPORT void nkRenderBundleEncoderPopDebugGroup(NkRenderBundleEncoder renderBundleEncoder); 873 | NK_EXPORT void nkRenderBundleEncoderPushDebugGroup(NkRenderBundleEncoder renderBundleEncoder, const char* groupLabel); 874 | NK_EXPORT void nkRenderBundleEncoderSetBindGroup(NkRenderBundleEncoder renderBundleEncoder, uint32_t groupIndex, NkBindGroup group, uint32_t dynamicOffsetCount, const uint32_t* dynamicOffsets); 875 | NK_EXPORT void nkRenderBundleEncoderSetIndexBuffer(NkRenderBundleEncoder renderBundleEncoder, NkBuffer buffer, NkIndexFormat format, uint64_t offset, uint64_t size); 876 | NK_EXPORT void nkRenderBundleEncoderSetPipeline(NkRenderBundleEncoder renderBundleEncoder, NkRenderPipeline pipeline); 877 | NK_EXPORT void nkRenderBundleEncoderSetVertexBuffer(NkRenderBundleEncoder renderBundleEncoder, uint32_t slot, NkBuffer 
buffer, uint64_t offset, uint64_t size); 878 | 879 | // Methods of RenderPassEncoder 880 | NK_EXPORT void nkRenderPassEncoderBeginOcclusionQuery(NkRenderPassEncoder renderPassEncoder, uint32_t queryIndex); 881 | NK_EXPORT void nkRenderPassEncoderBeginPipelineStatisticsQuery(NkRenderPassEncoder renderPassEncoder, NkQuerySet querySet, uint32_t queryIndex); 882 | NK_EXPORT void nkRenderPassEncoderDraw(NkRenderPassEncoder renderPassEncoder, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance); 883 | NK_EXPORT void nkRenderPassEncoderDrawIndexed(NkRenderPassEncoder renderPassEncoder, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t baseVertex, uint32_t firstInstance); 884 | NK_EXPORT void nkRenderPassEncoderDrawIndexedIndirect(NkRenderPassEncoder renderPassEncoder, NkBuffer indirectBuffer, uint64_t indirectOffset); 885 | NK_EXPORT void nkRenderPassEncoderDrawIndirect(NkRenderPassEncoder renderPassEncoder, NkBuffer indirectBuffer, uint64_t indirectOffset); 886 | NK_EXPORT void nkRenderPassEncoderEndOcclusionQuery(NkRenderPassEncoder renderPassEncoder); 887 | NK_EXPORT void nkRenderPassEncoderEndPass(NkRenderPassEncoder renderPassEncoder); 888 | NK_EXPORT void nkRenderPassEncoderEndPipelineStatisticsQuery(NkRenderPassEncoder renderPassEncoder); 889 | NK_EXPORT void nkRenderPassEncoderExecuteBundles(NkRenderPassEncoder renderPassEncoder, uint32_t bundlesCount, const NkRenderBundle* bundles); 890 | NK_EXPORT void nkRenderPassEncoderInsertDebugMarker(NkRenderPassEncoder renderPassEncoder, const char* markerLabel); 891 | NK_EXPORT void nkRenderPassEncoderPopDebugGroup(NkRenderPassEncoder renderPassEncoder); 892 | NK_EXPORT void nkRenderPassEncoderPushDebugGroup(NkRenderPassEncoder renderPassEncoder, const char* groupLabel); 893 | NK_EXPORT void nkRenderPassEncoderSetBindGroup(NkRenderPassEncoder renderPassEncoder, uint32_t groupIndex, NkBindGroup group, uint32_t dynamicOffsetCount, const uint32_t* dynamicOffsets); 894 | NK_EXPORT void nkRenderPassEncoderSetBlendColor(NkRenderPassEncoder renderPassEncoder, const NkColor* color); 895 | NK_EXPORT void nkRenderPassEncoderSetIndexBuffer(NkRenderPassEncoder renderPassEncoder, NkBuffer buffer, uint64_t offset, uint64_t size); 896 | NK_EXPORT void nkRenderPassEncoderSetPipeline(NkRenderPassEncoder renderPassEncoder, NkRenderPipeline pipeline); 897 | NK_EXPORT void nkRenderPassEncoderSetScissorRect(NkRenderPassEncoder renderPassEncoder, uint32_t x, uint32_t y, uint32_t width, uint32_t height); 898 | NK_EXPORT void nkRenderPassEncoderSetStencilReference(NkRenderPassEncoder renderPassEncoder, uint32_t reference); 899 | NK_EXPORT void nkRenderPassEncoderSetVertexBuffer(NkRenderPassEncoder renderPassEncoder, uint32_t slot, NkBuffer buffer, uint64_t offset, uint64_t size); 900 | NK_EXPORT void nkRenderPassEncoderSetViewport(NkRenderPassEncoder renderPassEncoder, float x, float y, float width, float height, float minDepth, float maxDepth); 901 | NK_EXPORT void nkRenderPassEncoderWriteTimestamp(NkRenderPassEncoder renderPassEncoder, NkQuerySet querySet, uint32_t queryIndex); 902 | 903 | // Methods of RenderPipeline 904 | NK_EXPORT void nkDestroyRenderPipeline(NkRenderPipeline renderPipeline); 905 | NK_EXPORT NkBindGroupLayout nkRenderPipelineGetBindGroupLayout(NkRenderPipeline renderPipeline, uint32_t groupIndex); 906 | 907 | // Methods of Surface 908 | NK_EXPORT void nkDestroySurface(NkSurface surface); 909 | 910 | // Methods of SwapChain 911 | NK_EXPORT void nkDestroySwapChain(NkSwapChain swapChain); 
912 | NK_EXPORT NkTextureView nkSwapChainGetCurrentTextureView(NkSwapChain swapChain);
913 | NK_EXPORT void nkSwapChainPresent(NkSwapChain swapChain);
914 |
915 | // Methods of Texture
916 | NK_EXPORT void nkDestroyTexture(NkTexture texture);
917 | NK_EXPORT NkTextureView nkCreateTextureView(NkTexture texture, const NkTextureViewInfo* descriptor);
918 |
919 | #ifdef __cplusplus
920 | } // extern "C"
921 | #endif
922 |
923 | #ifdef NK_IMPLEMENTATION
924 |
925 | /*
926 | Encoder API design considerations.
927 |
928 | BGFX and Dawn take the approach of recording commands on the CPU, and then
929 | deferring the actual backend command recording until you submit the work.
930 | The WebGPU design documents seem to suggest that this approach is intended for all
931 | implementations of WebGPU. This hides a bunch of complexity from the user, but it
932 | requires a bit of work from the implementation.
933 |
934 | https://github.com/gpuweb/gpuweb/blob/main/design/CommandSubmission.md
935 |
936 | I think this is a good idea: it decouples the Encoder API from the backends,
937 | meaning we could potentially have a shared Encoder API implementation. Freeing the
938 | encoder implementation from the backends also means we are in control of it,
939 | and can focus on making it nice and simple.
940 | */
941 |
942 | #if defined(__cplusplus)
943 | #define NK_CAST(type, x) static_cast<type>(x)
944 | #define NK_PTR_CAST(type, x) reinterpret_cast<type>(x)
945 | #define NK_NULL nullptr
946 | #else
947 | #define NK_CAST(type, x) ((type)x)
948 | #define NK_PTR_CAST(type, x) ((type)x)
949 | #define NK_NULL 0
950 | #endif
951 |
952 | #ifndef NK_ASSERT
953 | #include <assert.h>
954 | #define NK_ASSERT(c) assert(c)
955 | #endif
956 |
957 | #ifndef NK_MALLOC
958 | #include <stdlib.h>
959 | #define NK_MALLOC(size) malloc(size)
960 | #define NK_CALLOC(num, size) calloc(num, size)
961 | #define NK_FREE(pointer) free(pointer)
962 | #endif
963 |
964 | #ifndef NK_DEBUG
965 | #ifndef NDEBUG
966 | #define NK_DEBUG (1)
967 | #endif
968 | #endif
969 |
970 | #ifndef NK_LOG
971 | #ifdef NK_DEBUG
972 | #include <stdio.h>
973 | #define NK_LOG(...) { printf(__VA_ARGS__, __FILE__, __LINE__); }
974 | #else
975 | #define NK_LOG(...)
976 | #endif
977 | #endif
978 |
979 | #define NK_MAX(x, y) (((x) > (y)) ? (x) : (y))
980 | #define NK_MIN(x, y) (((x) < (y)) ? (x) : (y))
981 |
982 | // The initial allocator is a super simple linear allocator.
983 | // In the future it should be backed by a pool of memory blocks to let the allocator expand.
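/*
Sketch: how a backend might replay the recorded commands at submit time. This is
illustrative only, not part of the current implementation. It assumes that every
command struct begins with its NkCommandType, that commands sit contiguously in
the encoder's linear allocator below, and that a hypothetical nkCommandSize(type)
helper returns each command's size including alignment padding.

    void nkPlaybackCommands(const NkCommandAllocator* allocator) {
        uintptr_t cursor = allocator->buffer;
        const uintptr_t end = allocator->buffer + allocator->allocatedSize;
        while (cursor < end) {
            const NkCommandType type = *NK_PTR_CAST(const NkCommandType*, cursor);
            switch (type) {
            case NkCommandType_BeginRenderPass:
                // translate to the backend's begin-render-pass call here
                break;
            case NkCommandType_RenderPassEncoderDraw:
                // translate to the backend's draw call here
                break;
            default:
                break;
            }
            cursor += nkCommandSize(type); // hypothetical helper
        }
    }
*/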
984 | 985 | typedef struct NkCommandAllocator { 986 | uintptr_t buffer; 987 | uint32_t bufferSize; 988 | uint32_t allocatedSize; 989 | uint32_t lastAllocationSize; 990 | } NkCommandAllocator; 991 | 992 | NkCommandAllocator nkCreateCommandAllocator(uint32_t size) { 993 | 994 | NkCommandAllocator allocator; 995 | { 996 | allocator.buffer = NK_PTR_CAST(uintptr_t, NK_MALLOC(size)); 997 | NK_ASSERT(allocator.buffer); 998 | allocator.bufferSize = size; 999 | allocator.allocatedSize = 0; 1000 | allocator.lastAllocationSize = 0; 1001 | } 1002 | return allocator; 1003 | } 1004 | 1005 | void nkCommandAllocatorReset(NkCommandAllocator* const allocator) { 1006 | 1007 | NK_ASSERT(allocator); 1008 | allocator->buffer = 0; 1009 | allocator->bufferSize = 0; 1010 | allocator->allocatedSize = 0; 1011 | } 1012 | 1013 | #define NK_IS_POWER_OF_TWO(value) (value != 0 && (value & (value - 1)) == 0) 1014 | 1015 | #define NK_ALIGN_TO(type, value, alignment) (NK_CAST(type, (((value) + (alignment) - 1) & ~((alignment) - 1)))) 1016 | 1017 | #define NK_PTR_ALIGN_TO(type, value, alignment) NK_PTR_CAST(type*, (NK_ALIGN_TO(NK_PTR_CAST(uintptr_t, value), alignment))) 1018 | 1019 | #define NK_IS_PTR_ALIGNED(ptr, alignment) !(NK_PTR_CAST(uintptr_t, ptr) % alignment) 1020 | 1021 | NkBool nkCanSatisfyAllocation(uintptr_t buffer, uint32_t bufferSize, uint32_t allocatedSize, uint32_t size, uint32_t alignment) { 1022 | 1023 | uintptr_t bufferHead = buffer + allocatedSize; 1024 | uintptr_t allocStart = NK_ALIGN_TO(uintptr_t, bufferHead, alignment); 1025 | 1026 | NK_ASSERT(allocStart >= bufferHead); 1027 | if (allocStart < bufferHead) 1028 | { 1029 | // Alignment made us overflow 1030 | return NkFalse; 1031 | } 1032 | 1033 | uintptr_t allocEnd = allocStart + size; 1034 | 1035 | NK_ASSERT(allocEnd > allocStart); 1036 | if (allocEnd <= allocStart) 1037 | { 1038 | // Requested size made us overflow 1039 | return NkFalse; 1040 | } 1041 | 1042 | uintptr_t allocSize = allocEnd - bufferHead; 1043 | uint32_t newAllocatedSize = allocatedSize + allocSize; 1044 | 1045 | NK_ASSERT(newAllocatedSize <= bufferSize); 1046 | if (newAllocatedSize <= bufferSize) 1047 | { 1048 | // Still has free space, we fit 1049 | return NkTrue; 1050 | } 1051 | 1052 | // Not enough space 1053 | return NkFalse; 1054 | } 1055 | 1056 | void* nkAllocateFromBuffer(uintptr_t buffer, uint32_t bufferSize, uint32_t* allocatedSize, size_t size, size_t alignment, uint32_t* const outAllocationOffset) { 1057 | 1058 | NK_ASSERT(allocatedSize); 1059 | 1060 | uintptr_t bufferHead = buffer + *allocatedSize; 1061 | uintptr_t allocStart = NK_ALIGN_TO(uintptr_t, bufferHead, alignment); 1062 | NK_ASSERT(allocStart >= bufferHead); 1063 | 1064 | uintptr_t allocEnd = allocStart + size; 1065 | NK_ASSERT(allocEnd > allocStart); 1066 | 1067 | uintptr_t allocSize = allocEnd - bufferHead; 1068 | uint32_t newAllocatedSize = *allocatedSize + allocSize; 1069 | NK_ASSERT(newAllocatedSize <= bufferSize); 1070 | 1071 | *allocatedSize = newAllocatedSize; 1072 | 1073 | if (outAllocationOffset) { 1074 | *outAllocationOffset = NK_CAST(uint32_t, (allocStart - buffer)); 1075 | } 1076 | 1077 | return NK_PTR_CAST(void*, allocStart); 1078 | } 1079 | 1080 | void* nkCommandAllocatorAllocate(NkCommandAllocator* const allocator, uint32_t size, uint32_t alignment) { 1081 | 1082 | NK_ASSERT(allocator); 1083 | NK_ASSERT(allocator->buffer); 1084 | NK_ASSERT(size > 0); 1085 | NK_ASSERT(NK_IS_POWER_OF_TWO(alignment)); 1086 | NK_ASSERT(nkCanSatisfyAllocation(allocator->buffer, allocator->bufferSize, 
allocator->allocatedSize, size, alignment)); 1087 | return nkAllocateFromBuffer(allocator->buffer, allocator->bufferSize, &allocator->allocatedSize, size, alignment, NK_NULL); 1088 | } 1089 | 1090 | struct NkCommandEncoderImpl { 1091 | NkCommandAllocator allocator; 1092 | }; 1093 | 1094 | struct NkRenderPassEncoderImpl { 1095 | NkCommandAllocator* allocator; 1096 | }; 1097 | 1098 | struct NkComputePassEncoderImpl { 1099 | NkCommandAllocator* allocator; 1100 | }; 1101 | 1102 | #define NK_COMMAND_ALLOCATOR_SIZE 16384 // this is a hack that will be removed when the command allocator is smarter 1103 | 1104 | NkCommandEncoder nkCreateCommandEncoder(NkDevice device) { 1105 | 1106 | NkCommandEncoder commandEncoder = NK_PTR_CAST(NkCommandEncoder, NK_MALLOC(sizeof(struct NkCommandEncoderImpl))); 1107 | NK_ASSERT(commandEncoder); 1108 | commandEncoder->allocator = nkCreateCommandAllocator(NK_COMMAND_ALLOCATOR_SIZE); 1109 | return commandEncoder; 1110 | } 1111 | 1112 | typedef enum NkCommandType { 1113 | NkCommandType_BeginComputePass, 1114 | NkCommandType_BeginRenderPass, 1115 | NkCommandType_RenderPassEncoderSetPipeline, 1116 | NkCommandType_RenderPassEncoderSetVertexBuffer, 1117 | NkCommandType_RenderPassEncoderDraw 1118 | } NkCommandType; 1119 | 1120 | typedef struct NkBeginComputePassCommand { 1121 | NkCommandType type; 1122 | } NkBeginComputePassCommand; 1123 | 1124 | // Methods of CommandEncoder 1125 | NkComputePassEncoder nkCommandEncoderBeginComputePass(NkCommandEncoder commandEncoder) { 1126 | 1127 | NK_ASSERT(commandEncoder); 1128 | 1129 | NkBeginComputePassCommand* command = NK_PTR_CAST(NkBeginComputePassCommand*, nkCommandAllocatorAllocate(&commandEncoder->allocator, sizeof(NkBeginComputePassCommand), NK_ALIGN_OF(NkBeginComputePassCommand))); 1130 | NK_ASSERT(command); 1131 | 1132 | command->type = NkCommandType_BeginComputePass; 1133 | 1134 | NkComputePassEncoder passEncoder = NK_PTR_CAST(NkComputePassEncoder, NK_MALLOC(sizeof(struct NkComputePassEncoderImpl))); 1135 | NK_ASSERT(passEncoder); 1136 | 1137 | passEncoder->allocator = &commandEncoder->allocator; 1138 | 1139 | return passEncoder; 1140 | } 1141 | 1142 | typedef struct NkBeginRenderPassCommand { 1143 | NkCommandType type; 1144 | } NkBeginRenderPassCommand; 1145 | 1146 | NkRenderPassEncoder nkCommandEncoderBeginRenderPass(NkCommandEncoder commandEncoder, const NkRenderPassInfo* descriptor) { 1147 | 1148 | NK_ASSERT(commandEncoder); 1149 | NK_ASSERT(descriptor); 1150 | 1151 | NkBeginRenderPassCommand* command = 1152 | NK_PTR_CAST(NkBeginRenderPassCommand*, 1153 | nkCommandAllocatorAllocate(&commandEncoder->allocator, 1154 | sizeof(NkBeginRenderPassCommand), 1155 | NK_ALIGN_OF(NkBeginRenderPassCommand))); 1156 | NK_ASSERT(command); 1157 | 1158 | command->type = NkCommandType_BeginRenderPass; 1159 | 1160 | NkRenderPassEncoder passEncoder = 1161 | NK_PTR_CAST(NkRenderPassEncoder, NK_MALLOC(sizeof(struct NkRenderPassEncoderImpl))); 1162 | NK_ASSERT(passEncoder); 1163 | 1164 | passEncoder->allocator = &commandEncoder->allocator; 1165 | 1166 | return passEncoder; 1167 | } 1168 | 1169 | void nkCommandEncoderCopyBufferToBuffer(NkCommandEncoder commandEncoder, NkBuffer source, uint64_t sourceOffset, NkBuffer destination, uint64_t destinationOffset, uint64_t size) { 1170 | 1171 | } 1172 | 1173 | void nkCommandEncoderCopyBufferToTexture(NkCommandEncoder commandEncoder, const NkBufferCopyView* source, const NkTextureCopyView* destination, const NkExtent3D* copySize) { 1174 | 1175 | } 1176 | 1177 | void 
nkCommandEncoderCopyTextureToBuffer(NkCommandEncoder commandEncoder, const NkTextureCopyView* source, const NkBufferCopyView* destination, const NkExtent3D* copySize) { 1178 | 1179 | } 1180 | 1181 | void nkCommandEncoderCopyTextureToTexture(NkCommandEncoder commandEncoder, const NkTextureCopyView* source, const NkTextureCopyView* destination, const NkExtent3D* copySize) { 1182 | 1183 | } 1184 | 1185 | NkCommandBuffer nkCommandEncoderFinish(NkCommandEncoder commandEncoder) { 1186 | 1187 | NK_ASSERT(commandEncoder); 1188 | } 1189 | 1190 | void nkCommandEncoderInsertDebugMarker(NkCommandEncoder commandEncoder, const char* markerLabel) { 1191 | 1192 | } 1193 | 1194 | void nkCommandEncoderPopDebugGroup(NkCommandEncoder commandEncoder) { 1195 | 1196 | } 1197 | 1198 | void nkCommandEncoderPushDebugGroup(NkCommandEncoder commandEncoder, const char* groupLabel) { 1199 | 1200 | } 1201 | 1202 | void nkCommandEncoderResolveQuerySet(NkCommandEncoder commandEncoder, NkQuerySet querySet, uint32_t firstQuery, uint32_t queryCount, NkBuffer destination, uint64_t destinationOffset) { 1203 | 1204 | } 1205 | 1206 | void nkCommandEncoderWriteTimestamp(NkCommandEncoder commandEncoder, NkQuerySet querySet, uint32_t queryIndex) { 1207 | 1208 | } 1209 | 1210 | // Methods of ComputePassEncoder 1211 | void nkComputePassEncoderBeginPipelineStatisticsQuery(NkComputePassEncoder computePassEncoder, NkQuerySet querySet, uint32_t queryIndex) { 1212 | 1213 | } 1214 | 1215 | void nkComputePassEncoderDispatch(NkComputePassEncoder computePassEncoder, uint32_t x, uint32_t y, uint32_t z) { 1216 | 1217 | } 1218 | 1219 | void nkComputePassEncoderDispatchIndirect(NkComputePassEncoder computePassEncoder, NkBuffer indirectBuffer, uint64_t indirectOffset) { 1220 | 1221 | } 1222 | 1223 | void nkComputePassEncoderEndPass(NkComputePassEncoder computePassEncoder) { 1224 | 1225 | } 1226 | 1227 | void nkComputePassEncoderEndPipelineStatisticsQuery(NkComputePassEncoder computePassEncoder) { 1228 | 1229 | } 1230 | 1231 | void nkComputePassEncoderInsertDebugMarker(NkComputePassEncoder computePassEncoder, const char* markerLabel) { 1232 | 1233 | } 1234 | 1235 | void nkComputePassEncoderPopDebugGroup(NkComputePassEncoder computePassEncoder) { 1236 | 1237 | } 1238 | 1239 | void nkComputePassEncoderPushDebugGroup(NkComputePassEncoder computePassEncoder, const char* groupLabel) { 1240 | 1241 | } 1242 | 1243 | void nkComputePassEncoderSetBindGroup(NkComputePassEncoder computePassEncoder, uint32_t groupIndex, NkBindGroup group, uint32_t dynamicOffsetCount, const uint32_t* dynamicOffsets) { 1244 | 1245 | } 1246 | 1247 | void nkComputePassEncoderSetPipeline(NkComputePassEncoder computePassEncoder, NkComputePipeline pipeline) { 1248 | 1249 | } 1250 | 1251 | void nkComputePassEncoderWriteTimestamp(NkComputePassEncoder computePassEncoder, NkQuerySet querySet, uint32_t queryIndex) { 1252 | 1253 | } 1254 | 1255 | // Methods of RenderPassEncoder 1256 | void nkRenderPassEncoderBeginOcclusionQuery(NkRenderPassEncoder renderPassEncoder, uint32_t queryIndex) { 1257 | 1258 | } 1259 | 1260 | void nkRenderPassEncoderBeginPipelineStatisticsQuery(NkRenderPassEncoder renderPassEncoder, NkQuerySet querySet, uint32_t queryIndex) { 1261 | 1262 | } 1263 | 1264 | typedef struct NkRenderPassEncoderDraw { 1265 | NkCommandType type; 1266 | } NkRenderPassEncoderDraw; 1267 | 1268 | void nkRenderPassEncoderDraw(NkRenderPassEncoder renderPassEncoder, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) { 1269 | 1270 | 
NK_ASSERT(renderPassEncoder); 1271 | 1272 | NkRenderPassEncoderDraw* command = 1273 | NK_PTR_CAST(NkRenderPassEncoderDraw*, 1274 | nkCommandAllocatorAllocate(renderPassEncoder->allocator, 1275 | sizeof(NkRenderPassEncoderDraw), 1276 | NK_ALIGN_OF(NkRenderPassEncoderDraw))); 1277 | NK_ASSERT(command); 1278 | 1279 | command->type = NkCommandType_RenderPassEncoderDraw; 1280 | } 1281 | 1282 | void nkRenderPassEncoderDrawIndexed(NkRenderPassEncoder renderPassEncoder, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t baseVertex, uint32_t firstInstance) { 1283 | 1284 | } 1285 | 1286 | void nkRenderPassEncoderDrawIndexedIndirect(NkRenderPassEncoder renderPassEncoder, NkBuffer indirectBuffer, uint64_t indirectOffset) { 1287 | 1288 | } 1289 | 1290 | void nkRenderPassEncoderDrawIndirect(NkRenderPassEncoder renderPassEncoder, NkBuffer indirectBuffer, uint64_t indirectOffset) { 1291 | 1292 | } 1293 | 1294 | void nkRenderPassEncoderEndOcclusionQuery(NkRenderPassEncoder renderPassEncoder) { 1295 | 1296 | } 1297 | 1298 | void nkRenderPassEncoderEndPass(NkRenderPassEncoder renderPassEncoder) { 1299 | 1300 | } 1301 | 1302 | void nkRenderPassEncoderEndPipelineStatisticsQuery(NkRenderPassEncoder renderPassEncoder) { 1303 | 1304 | } 1305 | 1306 | void nkRenderPassEncoderExecuteBundles(NkRenderPassEncoder renderPassEncoder, uint32_t bundlesCount, const NkRenderBundle* bundles) { 1307 | 1308 | } 1309 | 1310 | void nkRenderPassEncoderInsertDebugMarker(NkRenderPassEncoder renderPassEncoder, const char* markerLabel) { 1311 | 1312 | } 1313 | 1314 | void nkRenderPassEncoderPopDebugGroup(NkRenderPassEncoder renderPassEncoder) { 1315 | 1316 | } 1317 | 1318 | void nkRenderPassEncoderPushDebugGroup(NkRenderPassEncoder renderPassEncoder, const char* groupLabel) { 1319 | 1320 | } 1321 | 1322 | void nkRenderPassEncoderSetBindGroup(NkRenderPassEncoder renderPassEncoder, uint32_t groupIndex, NkBindGroup group, uint32_t dynamicOffsetCount, const uint32_t* dynamicOffsets) { 1323 | 1324 | } 1325 | 1326 | void nkRenderPassEncoderSetBlendColor(NkRenderPassEncoder renderPassEncoder, const NkColor* color) { 1327 | 1328 | } 1329 | 1330 | void nkRenderPassEncoderSetIndexBuffer(NkRenderPassEncoder renderPassEncoder, NkBuffer buffer, uint64_t offset, uint64_t size) { 1331 | 1332 | } 1333 | 1334 | typedef struct NkRenderPassEncoderSetPipelineCommand { 1335 | NkCommandType type; 1336 | } NkRenderPassEncoderSetPipelineCommand; 1337 | 1338 | void nkRenderPassEncoderSetPipeline(NkRenderPassEncoder renderPassEncoder, NkRenderPipeline pipeline) { 1339 | 1340 | NK_ASSERT(renderPassEncoder); 1341 | NK_ASSERT(pipeline); 1342 | 1343 | NkRenderPassEncoderSetPipelineCommand* command = 1344 | NK_PTR_CAST(NkRenderPassEncoderSetPipelineCommand*, 1345 | nkCommandAllocatorAllocate(renderPassEncoder->allocator, 1346 | sizeof(NkRenderPassEncoderSetPipelineCommand), 1347 | NK_ALIGN_OF(NkRenderPassEncoderSetPipelineCommand))); 1348 | NK_ASSERT(command); 1349 | 1350 | command->type = NkCommandType_RenderPassEncoderSetPipeline; 1351 | } 1352 | 1353 | void nkRenderPassEncoderSetScissorRect(NkRenderPassEncoder renderPassEncoder, uint32_t x, uint32_t y, uint32_t width, uint32_t height) { 1354 | 1355 | } 1356 | 1357 | void nkRenderPassEncoderSetStencilReference(NkRenderPassEncoder renderPassEncoder, uint32_t reference) { 1358 | 1359 | } 1360 | 1361 | typedef struct NkRenderPassEncoderSetVertexBuffer { 1362 | NkCommandType type; 1363 | } NkRenderPassEncoderSetVertexBuffer; 1364 | 1365 | void 
nkRenderPassEncoderSetVertexBuffer(NkRenderPassEncoder renderPassEncoder, uint32_t slot, NkBuffer buffer, uint64_t offset, uint64_t size) { 1366 | 1367 | NK_ASSERT(renderPassEncoder); 1368 | 1369 | NkRenderPassEncoderSetVertexBuffer* command = 1370 | NK_PTR_CAST(NkRenderPassEncoderSetVertexBuffer*, 1371 | nkCommandAllocatorAllocate(renderPassEncoder->allocator, 1372 | sizeof(NkRenderPassEncoderSetVertexBuffer), 1373 | NK_ALIGN_OF(NkRenderPassEncoderSetVertexBuffer))); 1374 | NK_ASSERT(command); 1375 | 1376 | command->type = NkCommandType_RenderPassEncoderSetVertexBuffer; 1377 | } 1378 | 1379 | void nkRenderPassEncoderSetViewport(NkRenderPassEncoder renderPassEncoder, float x, float y, float width, float height, float minDepth, float maxDepth) { 1380 | 1381 | } 1382 | 1383 | void nkRenderPassEncoderWriteTimestamp(NkRenderPassEncoder renderPassEncoder, NkQuerySet querySet, uint32_t queryIndex) { 1384 | 1385 | } 1386 | 1387 | #define NK_MAX_BUFFERS 16 1388 | #define NK_MAX_ATTRIBUTES 16 1389 | 1390 | #ifdef NK_VULKAN_IMPLEMENTATION 1391 | 1392 | #include <vulkan/vulkan.h> 1393 | #if defined(_WIN32) 1394 | #define NOMINMAX 1395 | #define WIN32_LEAN_AND_MEAN 1396 | #include <windows.h> 1397 | #include <vulkan/vulkan_win32.h> 1398 | #endif 1399 | 1400 | const char* NkVkErrorString(VkResult errorCode) 1401 | { 1402 | switch (errorCode) 1403 | { 1404 | #define STR(r) case VK_ ##r: return #r 1405 | STR(NOT_READY); 1406 | STR(TIMEOUT); 1407 | STR(EVENT_SET); 1408 | STR(EVENT_RESET); 1409 | STR(INCOMPLETE); 1410 | STR(ERROR_OUT_OF_HOST_MEMORY); 1411 | STR(ERROR_OUT_OF_DEVICE_MEMORY); 1412 | STR(ERROR_INITIALIZATION_FAILED); 1413 | STR(ERROR_DEVICE_LOST); 1414 | STR(ERROR_MEMORY_MAP_FAILED); 1415 | STR(ERROR_LAYER_NOT_PRESENT); 1416 | STR(ERROR_EXTENSION_NOT_PRESENT); 1417 | STR(ERROR_FEATURE_NOT_PRESENT); 1418 | STR(ERROR_INCOMPATIBLE_DRIVER); 1419 | STR(ERROR_TOO_MANY_OBJECTS); 1420 | STR(ERROR_FORMAT_NOT_SUPPORTED); 1421 | STR(ERROR_SURFACE_LOST_KHR); 1422 | STR(ERROR_NATIVE_WINDOW_IN_USE_KHR); 1423 | STR(SUBOPTIMAL_KHR); 1424 | STR(ERROR_OUT_OF_DATE_KHR); 1425 | STR(ERROR_INCOMPATIBLE_DISPLAY_KHR); 1426 | STR(ERROR_VALIDATION_FAILED_EXT); 1427 | STR(ERROR_INVALID_SHADER_NV); 1428 | #undef STR 1429 | default: 1430 | return "UNKNOWN_ERROR"; 1431 | } 1432 | } 1433 | 1434 | #define NK_CHECK_VK(x) \ 1435 | do { \ 1436 | VkResult err = x; \ 1437 | if (err) { \ 1438 | NK_LOG("ERROR: Detected Vulkan error %s at %s:%d.\n", NkVkErrorString(err), __FILE__, __LINE__); \ 1439 | abort(); \ 1440 | } \ 1441 | } while (0) 1442 | 1443 | #define NK_ASSERT_VK_HANDLE(handle) \ 1444 | do { \ 1445 | if ((handle) == VK_NULL_HANDLE) { \ 1446 | NK_LOG("ERROR: Handle is NULL at %s:%d.\n", __FILE__, __LINE__); \ 1447 | abort(); \ 1448 | } \ 1449 | } while (0) 1450 | 1451 | static const char* NkDeviceExtensions[] = { 1452 | VK_KHR_SWAPCHAIN_EXTENSION_NAME 1453 | }; 1454 | 1455 | static uint32_t NkDeviceExtensionCount 1456 | = sizeof(NkDeviceExtensions) / sizeof(NkDeviceExtensions[0]); 1457 | 1458 | static const char* NkInstanceExtensions[] = { 1459 | VK_KHR_SURFACE_EXTENSION_NAME, 1460 | #if defined(_WIN32) 1461 | VK_KHR_WIN32_SURFACE_EXTENSION_NAME, 1462 | #endif 1463 | VK_EXT_DEBUG_UTILS_EXTENSION_NAME 1464 | }; 1465 | 1466 | static uint32_t NkInstanceExtensionCount 1467 | = sizeof(NkInstanceExtensions) / sizeof(NkInstanceExtensions[0]); 1468 | 1469 | static const char* NkValidationLayers[] = { 1470 | "VK_LAYER_KHRONOS_validation" 1471 | }; 1472 | 1473 | static uint32_t NkValidationLayerCount 1474 | = sizeof(NkValidationLayers) / sizeof(NkValidationLayers[0]); 1475 | 1476 | #ifdef NDEBUG 1477 | const 
NkBool NkEnableValidationLayers = NkFalse; 1478 | #else 1479 | const NkBool NkEnableValidationLayers = NkTrue; 1480 | #endif 1481 | 1482 | // any structs with int32_t foo are unimplemented. This is just to let the code compile in C mode, where empty structs are illegal. 1483 | 1484 | struct NkBindGroupImpl { 1485 | int32_t foo; 1486 | }; 1487 | 1488 | struct NkBindGroupLayoutImpl { 1489 | int32_t foo; 1490 | }; 1491 | 1492 | struct NkBufferImpl { 1493 | int32_t foo; 1494 | }; 1495 | 1496 | struct NkCommandBufferImpl { 1497 | int32_t foo; 1498 | }; 1499 | 1500 | struct NkComputePipelineImpl { 1501 | int32_t foo; 1502 | }; 1503 | 1504 | typedef struct NkVkQueueFamilyIndices { 1505 | uint32_t graphicsFamily; 1506 | uint32_t presentFamily; 1507 | } NkVkQueueFamilyIndices; 1508 | 1509 | struct NkQueueImpl { 1510 | VkQueue queue; 1511 | }; 1512 | 1513 | struct NkDeviceImpl { 1514 | NkInstance instance; 1515 | VkPhysicalDevice physicalDevice; 1516 | VkDevice device; 1517 | struct NkQueueImpl queue; 1518 | }; 1519 | 1520 | struct NkFenceImpl { 1521 | int32_t foo; 1522 | }; 1523 | 1524 | struct NkInstanceImpl { 1525 | VkInstance instance; 1526 | VkDebugUtilsMessengerEXT debugMessenger; 1527 | }; 1528 | 1529 | struct NkPipelineLayoutImpl { 1530 | int32_t foo; 1531 | }; 1532 | 1533 | struct NkQuerySetImpl { 1534 | int32_t foo; 1535 | }; 1536 | 1537 | struct NkRenderBundleImpl { 1538 | int32_t foo; 1539 | }; 1540 | 1541 | struct NkRenderBundleEncoderImpl { 1542 | int32_t foo; 1543 | }; 1544 | 1545 | struct NkRenderPipelineImpl { 1546 | VkPipeline pipeline; 1547 | }; 1548 | 1549 | struct NkSamplerImpl { 1550 | int32_t foo; 1551 | }; 1552 | 1553 | struct NkShaderModuleImpl { 1554 | VkDevice device; 1555 | VkShaderModule module; 1556 | }; 1557 | 1558 | struct NkSurfaceImpl { 1559 | VkInstance instance; 1560 | VkSurfaceKHR surface; 1561 | }; 1562 | 1563 | struct NkSwapChainImpl { 1564 | VkDevice device; 1565 | VkSwapchainKHR swapChain; 1566 | VkImage* swapChainImages; 1567 | uint32_t swapChainImageCount; 1568 | struct NkTextureViewImpl* swapChainTextureViews; 1569 | uint32_t currentFrame; 1570 | }; 1571 | 1572 | struct NkTextureImpl { 1573 | int32_t foo; 1574 | }; 1575 | 1576 | struct NkTextureViewImpl { 1577 | VkImageView imageView; 1578 | }; 1579 | 1580 | static NkBool nkVkCheckValidationLayerSupport() { 1581 | 1582 | uint32_t layerCount; 1583 | NK_CHECK_VK(vkEnumerateInstanceLayerProperties(&layerCount, NK_NULL)); 1584 | 1585 | if (layerCount != 0) 1586 | { 1587 | VkLayerProperties* availableLayers = NK_PTR_CAST(VkLayerProperties*, NK_MALLOC(sizeof(VkLayerProperties) * layerCount)); 1588 | NK_ASSERT(availableLayers); 1589 | 1590 | NK_CHECK_VK(vkEnumerateInstanceLayerProperties(&layerCount, availableLayers)); 1591 | 1592 | for (uint32_t i = 0; i < NkValidationLayerCount; i++) 1593 | { 1594 | const char* layerName = NkValidationLayers[i]; 1595 | NkBool layerFound = NkFalse; 1596 | 1597 | for (uint32_t j = 0; j < layerCount; j++) { 1598 | if (strcmp(layerName, availableLayers[j].layerName) == 0) { 1599 | layerFound = NkTrue; 1600 | break; 1601 | } 1602 | } 1603 | 1604 | if (!layerFound) { 1605 | return NkFalse; 1606 | } 1607 | } 1608 | 1609 | return NkTrue; 1610 | } 1611 | } 1612 | 1613 | static VKAPI_ATTR VkBool32 VKAPI_CALL nkVkDebugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, void* pUserData) { 1614 | if (messageSeverity >= VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) { 1615 | 
NK_LOG("%s\n", pCallbackData->pMessage); 1616 | } 1617 | return VK_FALSE; 1618 | } 1619 | 1620 | static VkResult nkVkCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pDebugMessenger) { 1621 | 1622 | NK_ASSERT_VK_HANDLE(instance); 1623 | NK_ASSERT(pCreateInfo); 1624 | NK_ASSERT(pDebugMessenger); 1625 | 1626 | PFN_vkCreateDebugUtilsMessengerEXT func = (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance, "vkCreateDebugUtilsMessengerEXT"); 1627 | if (func != NK_NULL) { 1628 | return func(instance, pCreateInfo, pAllocator, pDebugMessenger); 1629 | } 1630 | else { 1631 | return VK_ERROR_EXTENSION_NOT_PRESENT; 1632 | } 1633 | } 1634 | 1635 | static void nkVkDestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT debugMessenger, const VkAllocationCallbacks* pAllocator) { 1636 | 1637 | NK_ASSERT_VK_HANDLE(instance); 1638 | NK_ASSERT_VK_HANDLE(debugMessenger); 1639 | 1640 | PFN_vkDestroyDebugUtilsMessengerEXT func = (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance, "vkDestroyDebugUtilsMessengerEXT"); 1641 | if (func != NK_NULL) { 1642 | func(instance, debugMessenger, pAllocator); 1643 | } 1644 | } 1645 | 1646 | NkInstance nkCreateInstance() { 1647 | 1648 | if (NkEnableValidationLayers) { 1649 | NK_ASSERT(nkVkCheckValidationLayerSupport()); 1650 | } 1651 | 1652 | NkInstance instance = NK_PTR_CAST(NkInstance, NK_MALLOC(sizeof(struct NkInstanceImpl))); 1653 | NK_ASSERT(instance); 1654 | 1655 | VkApplicationInfo appInfo; 1656 | { 1657 | appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO; 1658 | appInfo.pNext = NK_NULL; 1659 | appInfo.pApplicationName = "Neko"; 1660 | appInfo.applicationVersion = VK_MAKE_VERSION(1, 0, 0); 1661 | appInfo.pEngineName = "Neko"; 1662 | appInfo.engineVersion = VK_MAKE_VERSION(1, 0, 0); 1663 | appInfo.apiVersion = VK_API_VERSION_1_0; 1664 | } 1665 | 1666 | VkInstanceCreateInfo createInfo; 1667 | { 1668 | createInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO; 1669 | createInfo.pNext = NK_NULL; 1670 | createInfo.flags = 0; 1671 | createInfo.pApplicationInfo = &appInfo; 1672 | createInfo.enabledExtensionCount = sizeof(NkInstanceExtensions) / sizeof(NkInstanceExtensions[0]); 1673 | createInfo.ppEnabledExtensionNames = NkInstanceExtensions; 1674 | if (NkEnableValidationLayers) { 1675 | createInfo.enabledLayerCount = NkValidationLayerCount; 1676 | createInfo.ppEnabledLayerNames = NkValidationLayers; 1677 | } 1678 | else { 1679 | createInfo.enabledLayerCount = 0; 1680 | } 1681 | } 1682 | 1683 | NK_CHECK_VK(vkCreateInstance(&createInfo, NK_NULL, &instance->instance)); 1684 | 1685 | VkDebugUtilsMessengerCreateInfoEXT debugMessengerCreateInfo; 1686 | { 1687 | debugMessengerCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT; debugMessengerCreateInfo.pNext = NK_NULL; 1688 | debugMessengerCreateInfo.flags = 0; 1689 | debugMessengerCreateInfo.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT; 1690 | debugMessengerCreateInfo.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT; 1691 | debugMessengerCreateInfo.pfnUserCallback = nkVkDebugCallback; 1692 | debugMessengerCreateInfo.pUserData = NK_NULL; 1693 | } 1694 | 1695 | NK_CHECK_VK(nkVkCreateDebugUtilsMessengerEXT(instance->instance, &debugMessengerCreateInfo, NK_NULL, 
&instance->debugMessenger)); 1696 | 1697 | return instance; 1698 | } 1699 | 1700 | // Methods of Buffer 1701 | void nkDestroyBuffer(NkBuffer buffer) { 1702 | 1703 | } 1704 | 1705 | const void* nkBufferGetConstMappedRange(NkBuffer buffer, size_t offset, size_t size) { 1706 | 1707 | } 1708 | 1709 | void* nkBufferGetMappedRange(NkBuffer buffer, size_t offset, size_t size) { 1710 | 1711 | } 1712 | 1713 | NkBufferMapAsyncStatus nkBufferMap(NkBuffer buffer, NkMapModeFlags mode, size_t offset, size_t size) { 1714 | 1715 | } 1716 | 1717 | void nkBufferUnmap(NkBuffer buffer) { 1718 | 1719 | } 1720 | 1721 | // Methods of ComputePipeline 1722 | NkBindGroupLayout nkComputePipelineGetBindGroupLayout(NkComputePipeline computePipeline, uint32_t groupIndex) { 1723 | 1724 | } 1725 | 1726 | // Methods of Device 1727 | void nkDestroyDevice(NkDevice device) { 1728 | 1729 | NK_ASSERT(device); 1730 | vkDestroyDevice(device->device, NK_NULL); 1731 | NK_FREE(device); 1732 | } 1733 | 1734 | NkBindGroup nkCreateBindGroup(NkDevice device, const NkBindGroupInfo* descriptor) { 1735 | 1736 | } 1737 | 1738 | NkBindGroupLayout nkCreateBindGroupLayout(NkDevice device, const NkBindGroupLayoutInfo* descriptor) { 1739 | 1740 | } 1741 | 1742 | NkBuffer nkCreateBuffer(NkDevice device, const NkBufferInfo* descriptor) { 1743 | 1744 | } 1745 | 1746 | NkComputePipeline nkCreateComputePipeline(NkDevice device, const NkComputePipelineInfo* descriptor) { 1747 | 1748 | } 1749 | 1750 | NkPipelineLayout nkCreatePipelineLayout(NkDevice device, const NkPipelineLayoutInfo* descriptor) { 1751 | 1752 | } 1753 | 1754 | NkQuerySet nkCreateQuerySet(NkDevice device, const NkQuerySetInfo* descriptor) { 1755 | 1756 | } 1757 | 1758 | NkRenderBundleEncoder nkCreateRenderBundleEncoder(NkDevice device, const NkRenderBundleEncoderInfo* descriptor) { 1759 | 1760 | } 1761 | 1762 | static VkVertexInputRate nkVkInputRate(NkInputStepMode stepMode) { 1763 | switch (stepMode) { 1764 | case NkInputStepMode_Vertex: 1765 | return VK_VERTEX_INPUT_RATE_VERTEX; 1766 | case NkInputStepMode_Instance: 1767 | return VK_VERTEX_INPUT_RATE_INSTANCE; 1768 | } 1769 | return VK_VERTEX_INPUT_RATE_MAX_ENUM; 1770 | } 1771 | 1772 | static VkFormat nkVkFormat(NkVertexFormat format) { 1773 | switch (format) { 1774 | case NkVertexFormat_UChar2: 1775 | return VK_FORMAT_R8G8_UINT; 1776 | case NkVertexFormat_UChar4: 1777 | return VK_FORMAT_R8G8B8A8_UINT; 1778 | case NkVertexFormat_Char2: 1779 | return VK_FORMAT_R8G8_SINT; 1780 | case NkVertexFormat_Char4: 1781 | return VK_FORMAT_R8G8B8A8_SINT; 1782 | case NkVertexFormat_UChar2Norm: 1783 | return VK_FORMAT_R8G8_UNORM; 1784 | case NkVertexFormat_UChar4Norm: 1785 | return VK_FORMAT_R8G8B8A8_UNORM; 1786 | case NkVertexFormat_Char2Norm: 1787 | return VK_FORMAT_R8G8_SNORM; 1788 | case NkVertexFormat_Char4Norm: 1789 | return VK_FORMAT_R8G8B8A8_SNORM; 1790 | case NkVertexFormat_UShort2: 1791 | return VK_FORMAT_R16G16_UINT; 1792 | case NkVertexFormat_UShort4: 1793 | return VK_FORMAT_R16G16B16A16_UINT; 1794 | case NkVertexFormat_Short2: 1795 | return VK_FORMAT_R16G16_SINT; 1796 | case NkVertexFormat_Short4: 1797 | return VK_FORMAT_R16G16B16A16_SINT; 1798 | case NkVertexFormat_UShort2Norm: 1799 | return VK_FORMAT_R16G16_UNORM; 1800 | case NkVertexFormat_UShort4Norm: 1801 | return VK_FORMAT_R16G16B16A16_UNORM; 1802 | case NkVertexFormat_Short2Norm: 1803 | return VK_FORMAT_R16G16_SNORM; 1804 | case NkVertexFormat_Short4Norm: 1805 | return VK_FORMAT_R16G16B16A16_SNORM; 1806 | case NkVertexFormat_Half2: 1807 | return VK_FORMAT_R16G16_SFLOAT; 1808 | 
case NkVertexFormat_Half4: 1809 | return VK_FORMAT_R16G16B16A16_SFLOAT; 1810 | case NkVertexFormat_Float: 1811 | return VK_FORMAT_R32_SFLOAT; 1812 | case NkVertexFormat_Float2: 1813 | return VK_FORMAT_R32G32_SFLOAT; 1814 | case NkVertexFormat_Float3: 1815 | return VK_FORMAT_R32G32B32_SFLOAT; 1816 | case NkVertexFormat_Float4: 1817 | return VK_FORMAT_R32G32B32A32_SFLOAT; 1818 | case NkVertexFormat_UInt: 1819 | return VK_FORMAT_R32_UINT; 1820 | case NkVertexFormat_UInt2: 1821 | return VK_FORMAT_R32G32_UINT; 1822 | case NkVertexFormat_UInt3: 1823 | return VK_FORMAT_R32G32B32_UINT; 1824 | case NkVertexFormat_UInt4: 1825 | return VK_FORMAT_R32G32B32A32_UINT; 1826 | case NkVertexFormat_Int: 1827 | return VK_FORMAT_R32_SINT; 1828 | case NkVertexFormat_Int2: 1829 | return VK_FORMAT_R32G32_SINT; 1830 | case NkVertexFormat_Int3: 1831 | return VK_FORMAT_R32G32B32_SINT; 1832 | case NkVertexFormat_Int4: 1833 | return VK_FORMAT_R32G32B32A32_SINT; 1834 | } 1835 | return VK_FORMAT_MAX_ENUM; 1836 | } 1837 | 1838 | static VkPrimitiveTopology nkVkPrimitiveTopology(NkPrimitiveTopology topology) { 1839 | switch (topology) { 1840 | case NkPrimitiveTopology_PointList: 1841 | return VK_PRIMITIVE_TOPOLOGY_POINT_LIST; 1842 | case NkPrimitiveTopology_LineList: 1843 | return VK_PRIMITIVE_TOPOLOGY_LINE_LIST; 1844 | case NkPrimitiveTopology_LineStrip: 1845 | return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP; 1846 | case NkPrimitiveTopology_TriangleList: 1847 | return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; 1848 | case NkPrimitiveTopology_TriangleStrip: 1849 | return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP; 1850 | } 1851 | return VK_PRIMITIVE_TOPOLOGY_MAX_ENUM; 1852 | } 1853 | 1854 | VkBool32 nkVkShouldEnablePrimitiveRestart(NkPrimitiveTopology topology) { 1855 | // Primitive restart is always enabled in Neko (due to Metal always enabling primitive restart) 1856 | // but Vulkan validation rules ask that primitive restart be only enabled on primitive topologies 1857 | // that support restarting. 
1858 | 1859 | switch (topology) { 1860 | case NkPrimitiveTopology_PointList: 1861 | case NkPrimitiveTopology_LineList: 1862 | case NkPrimitiveTopology_TriangleList: 1863 | return NkFalse; 1864 | case NkPrimitiveTopology_LineStrip: 1865 | case NkPrimitiveTopology_TriangleStrip: 1866 | return NkTrue; 1867 | } 1868 | } 1869 | 1870 | NkRenderPipeline nkCreateRenderPipeline(NkDevice device, const NkRenderPipelineInfo* descriptor) { 1871 | 1872 | NK_ASSERT(device); 1873 | NK_ASSERT(descriptor); 1874 | 1875 | NkRenderPipeline renderPipeline = 1876 | NK_PTR_CAST(NkRenderPipeline, NK_MALLOC(sizeof(struct NkRenderPipelineImpl))); 1877 | NK_ASSERT(renderPipeline); 1878 | 1879 | VkGraphicsPipelineCreateInfo createInfo; 1880 | { 1881 | createInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; 1882 | createInfo.pNext = NULL; 1883 | createInfo.flags = 0; 1884 | 1885 | createInfo.pTessellationState = NULL; 1886 | createInfo.pViewportState = NULL; 1887 | 1888 | VkPipelineShaderStageCreateInfo vertShaderStageInfo; 1889 | { 1890 | vertShaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; 1891 | vertShaderStageInfo.pNext = NULL; 1892 | vertShaderStageInfo.flags = 0; 1893 | vertShaderStageInfo.stage = VK_SHADER_STAGE_VERTEX_BIT; 1894 | vertShaderStageInfo.module = descriptor->vertexStage.module->module; 1895 | vertShaderStageInfo.pName = descriptor->vertexStage.entryPoint; 1896 | vertShaderStageInfo.pSpecializationInfo = NULL; 1897 | } 1898 | 1899 | VkPipelineShaderStageCreateInfo fragShaderStageInfo; 1900 | { 1901 | fragShaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; 1902 | fragShaderStageInfo.pNext = NULL; 1903 | fragShaderStageInfo.flags = 0; 1904 | fragShaderStageInfo.stage = VK_SHADER_STAGE_FRAGMENT_BIT; 1905 | fragShaderStageInfo.module = descriptor->fragmentStage.module->module; 1906 | fragShaderStageInfo.pName = descriptor->fragmentStage.entryPoint; 1907 | fragShaderStageInfo.pSpecializationInfo = NULL; 1908 | } 1909 | 1910 | VkPipelineShaderStageCreateInfo shaderStages[] = { vertShaderStageInfo, fragShaderStageInfo }; 1911 | 1912 | createInfo.pStages = shaderStages; 1913 | createInfo.stageCount = 2; 1914 | 1915 | // TODO: set up default vertex state info when user doesn't supply a custom one. 1916 | // For now just assert it's not NULL. 
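/*
 * [Editor's sketch, not original code] One way to resolve the TODO above: when
 * descriptor->vertexState is NULL, fall back to an empty vertex-input state instead of asserting:
 *
 *     VkPipelineVertexInputStateCreateInfo emptyVertexInput;
 *     emptyVertexInput.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
 *     emptyVertexInput.pNext = NULL;
 *     emptyVertexInput.flags = 0;
 *     emptyVertexInput.vertexBindingDescriptionCount = 0;
 *     emptyVertexInput.pVertexBindingDescriptions = NULL;
 *     emptyVertexInput.vertexAttributeDescriptionCount = 0;
 *     emptyVertexInput.pVertexAttributeDescriptions = NULL;
 *     createInfo.pVertexInputState = &emptyVertexInput;
 *
 * The name emptyVertexInput is illustrative, and the variable would have to stay alive until
 * vkCreateGraphicsPipelines is called. The code below keeps the original behaviour and asserts.
 */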
1917 | NK_ASSERT(descriptor->vertexState); 1918 | 1919 | uint32_t bindingCount = 0; 1920 | VkVertexInputBindingDescription bindings[NK_MAX_BUFFERS]; 1921 | 1922 | uint32_t attributeCount = 0; 1923 | VkVertexInputAttributeDescription attributes[NK_MAX_ATTRIBUTES]; 1924 | 1925 | VkPipelineVertexInputStateCreateInfo vertexInputInfo; 1926 | { 1927 | for (uint32_t slot = 0; slot < descriptor->vertexState->vertexBufferCount; slot++) { 1928 | 1929 | NkVertexBufferLayoutInfo* vertexBufferLayoutInfo = 1930 | descriptor->vertexState->vertexBuffers + slot; 1931 | 1932 | if (vertexBufferLayoutInfo->attributeCount == 0) { 1933 | continue; 1934 | } 1935 | 1936 | VkVertexInputBindingDescription* bindingDesc = bindings + bindingCount; 1937 | bindingDesc->binding = slot; 1938 | bindingDesc->stride = vertexBufferLayoutInfo->arrayStride; 1939 | bindingDesc->inputRate = nkVkInputRate(vertexBufferLayoutInfo->stepMode); 1940 | bindingCount++; 1941 | 1942 | for (uint32_t attributeIndex = 0; attributeIndex < vertexBufferLayoutInfo->attributeCount; attributeIndex++) { 1943 | 1944 | NkVertexAttributeInfo* vertexAttributeInfo = 1945 | vertexBufferLayoutInfo->attributes + attributeIndex; 1946 | 1947 | VkVertexInputAttributeDescription* attributeDesc = attributes + attributeCount; 1948 | attributeDesc->location = vertexAttributeInfo->shaderLocation; 1949 | attributeDesc->binding = slot; 1950 | attributeDesc->format = nkVkFormat(vertexAttributeInfo->format); 1951 | attributeDesc->offset = vertexAttributeInfo->offset; 1952 | 1953 | attributeCount++; 1954 | } 1955 | } 1956 | 1957 | vertexInputInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; 1958 | vertexInputInfo.pNext = NULL; 1959 | vertexInputInfo.flags = 0; 1960 | vertexInputInfo.vertexBindingDescriptionCount = bindingCount; 1961 | vertexInputInfo.pVertexBindingDescriptions = bindings; 1962 | vertexInputInfo.vertexAttributeDescriptionCount = attributeCount; 1963 | vertexInputInfo.pVertexAttributeDescriptions = attributes; 1964 | createInfo.pVertexInputState = &vertexInputInfo; 1965 | } 1966 | 1967 | VkPipelineInputAssemblyStateCreateInfo inputAssembly; 1968 | { 1969 | inputAssembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; 1970 | inputAssembly.pNext = NULL; 1971 | inputAssembly.flags = 0; 1972 | inputAssembly.topology = nkVkPrimitiveTopology(descriptor->primitiveTopology); 1973 | inputAssembly.primitiveRestartEnable = nkVkShouldEnablePrimitiveRestart(descriptor->primitiveTopology); 1974 | createInfo.pInputAssemblyState = &inputAssembly; 1975 | } 1976 | 1977 | VkPipelineRasterizationStateCreateInfo rasterizer; 1978 | { 1979 | rasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; 1980 | rasterizer.pNext = NULL; 1981 | rasterizer.flags = 0; 1982 | rasterizer.rasterizerDiscardEnable = VK_FALSE; 1983 | rasterizer.polygonMode = VK_POLYGON_MODE_FILL; 1984 | rasterizer.lineWidth = 1.0f; 1985 | rasterizer.cullMode = VK_CULL_MODE_BACK_BIT; 1986 | rasterizer.frontFace = VK_FRONT_FACE_CLOCKWISE; 1987 | rasterizer.depthBiasEnable = VK_FALSE; 1988 | rasterizer.depthBiasConstantFactor = 0.0f; // Optional 1989 | rasterizer.depthBiasClamp = 0.0f; // Optional 1990 | rasterizer.depthBiasSlopeFactor = 0.0f; // Optional 1991 | rasterizer.depthClampEnable = VK_FALSE; 1992 | createInfo.pRasterizationState = &rasterizer; 1993 | } 1994 | 1995 | VkPipelineMultisampleStateCreateInfo multisampling; 1996 | { 1997 | multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; 1998 | multisampling.pNext = 
NULL; 1999 | multisampling.flags = 0; 2000 | multisampling.sampleShadingEnable = VK_FALSE; 2001 | multisampling.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT; 2002 | multisampling.minSampleShading = 1.0f; // Optional 2003 | multisampling.pSampleMask = NULL; // Optional 2004 | multisampling.alphaToCoverageEnable = VK_FALSE; // Optional 2005 | multisampling.alphaToOneEnable = VK_FALSE; // Optional 2006 | createInfo.pMultisampleState = &multisampling; 2007 | } 2008 | 2009 | VkPipelineColorBlendAttachmentState colorBlendAttachment; 2010 | { 2011 | colorBlendAttachment.colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT; 2012 | colorBlendAttachment.blendEnable = VK_FALSE; 2013 | colorBlendAttachment.srcColorBlendFactor = VK_BLEND_FACTOR_ONE; // Optional 2014 | colorBlendAttachment.dstColorBlendFactor = VK_BLEND_FACTOR_ZERO; // Optional 2015 | colorBlendAttachment.colorBlendOp = VK_BLEND_OP_ADD; // Optional 2016 | colorBlendAttachment.srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE; // Optional 2017 | colorBlendAttachment.dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO; // Optional 2018 | colorBlendAttachment.alphaBlendOp = VK_BLEND_OP_ADD; // Optional 2019 | } 2020 | 2021 | VkPipelineColorBlendStateCreateInfo colorBlending; 2022 | { 2023 | colorBlending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; 2024 | colorBlending.pNext = NULL; 2025 | colorBlending.flags = 0; 2026 | colorBlending.logicOpEnable = VK_FALSE; 2027 | colorBlending.logicOp = VK_LOGIC_OP_COPY; // Optional 2028 | colorBlending.attachmentCount = 1; 2029 | colorBlending.pAttachments = &colorBlendAttachment; 2030 | colorBlending.blendConstants[0] = 0.0f; // Optional 2031 | colorBlending.blendConstants[1] = 0.0f; // Optional 2032 | colorBlending.blendConstants[2] = 0.0f; // Optional 2033 | colorBlending.blendConstants[3] = 0.0f; // Optional 2034 | createInfo.pColorBlendState = &colorBlending; 2035 | } 2036 | 2037 | VkDynamicState dynamicStates[] = { 2038 | VK_DYNAMIC_STATE_VIEWPORT, 2039 | VK_DYNAMIC_STATE_LINE_WIDTH 2040 | }; 2041 | 2042 | VkPipelineDynamicStateCreateInfo dynamicState; 2043 | { 2044 | dynamicState.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; 2045 | dynamicState.pNext = NULL; 2046 | dynamicState.flags = 0; 2047 | dynamicState.dynamicStateCount = 2; 2048 | dynamicState.pDynamicStates = dynamicStates; 2049 | createInfo.pDynamicState = &dynamicState; 2050 | } 2051 | 2052 | createInfo.pDepthStencilState = NULL; 2053 | 2054 | createInfo.layout = VK_NULL_HANDLE; // TODO 2055 | } 2056 | 2057 | NK_CHECK_VK(vkCreateGraphicsPipelines(device->device, VK_NULL_HANDLE, 1, &createInfo, NULL, &renderPipeline->pipeline)); 2058 | 2059 | return renderPipeline; 2060 | } 2061 | 2062 | NkSampler nkCreateSampler(NkDevice device, const NkSamplerInfo* descriptor) { 2063 | 2064 | } 2065 | 2066 | NkShaderModule nkCreateShaderModule(NkDevice device, const NkShaderModuleInfo* descriptor) { 2067 | 2068 | NK_ASSERT(device); 2069 | NK_ASSERT(descriptor); 2070 | 2071 | NkShaderModule shaderModule = NK_PTR_CAST(NkShaderModule, NK_MALLOC(sizeof(struct NkShaderModuleImpl))); 2072 | NK_ASSERT(shaderModule); 2073 | 2074 | // SPIR-V code is passed to Vulkan as an array of uint32_t. Neko's interface is generalised so it takes IR 2075 | // as a void pointer. Unfortunately that means that someone could feasibly feed it a byte buffer that is not 2076 | // aligned correctly. 
This is unlikely to happen as I think most general allocators will make sure that the 2077 | // data satisfies the worst-case alignment requirements, but just in case, we're just making sure the buffer 2078 | // is suitably aligned before we cast the pointer to a uint32_t. 2079 | NK_ASSERT(NK_IS_PTR_ALIGNED(descriptor->source, NK_ALIGN_OF(uint32_t))); 2080 | 2081 | VkShaderModuleCreateInfo shaderInfo; 2082 | { 2083 | shaderInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; 2084 | shaderInfo.pNext = NK_NULL; 2085 | shaderInfo.flags = 0; 2086 | shaderInfo.codeSize = descriptor->size; 2087 | shaderInfo.pCode = NK_PTR_CAST(const uint32_t*, descriptor->source); 2088 | } 2089 | 2090 | NK_CHECK_VK(vkCreateShaderModule(device->device, &shaderInfo, NK_NULL, &shaderModule->module)); 2091 | 2092 | return shaderModule; 2093 | } 2094 | 2095 | void nkDestroyShaderModule(NkShaderModule shaderModule) { 2096 | 2097 | NK_ASSERT(shaderModule); 2098 | vkDestroyShaderModule(shaderModule->device, shaderModule->module, NULL); 2099 | NK_FREE(shaderModule); 2100 | } 2101 | 2102 | typedef struct NkVkSurfaceSupportDetails { 2103 | VkSurfaceCapabilitiesKHR capabilities; 2104 | VkSurfaceFormatKHR* formats; 2105 | uint32_t formatCount; 2106 | VkPresentModeKHR* presentModes; 2107 | uint32_t presentModeCount; 2108 | } NkVkSurfaceSupportDetails; 2109 | 2110 | static NkVkSurfaceSupportDetails nkVkCreateSurfaceSupportDetails(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface) { 2111 | 2112 | NK_ASSERT_VK_HANDLE(physicalDevice); 2113 | NK_ASSERT_VK_HANDLE(surface); 2114 | 2115 | NkVkSurfaceSupportDetails details; 2116 | { 2117 | details.formats = NK_NULL; 2118 | details.formatCount = 0; 2119 | details.presentModes = NK_NULL; 2120 | details.presentModeCount = 0; 2121 | } 2122 | 2123 | vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, &details.capabilities); 2124 | 2125 | vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, &details.formatCount, NK_NULL); 2126 | 2127 | if (details.formatCount != 0) { 2128 | details.formats = NK_PTR_CAST(VkSurfaceFormatKHR*, NK_MALLOC(details.formatCount * sizeof(VkSurfaceFormatKHR))); 2129 | NK_ASSERT(details.formats); 2130 | 2131 | vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, &details.formatCount, details.formats); 2132 | } 2133 | 2134 | vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, &details.presentModeCount, NK_NULL); 2135 | 2136 | if (details.presentModeCount != 0) { 2137 | details.presentModes = NK_PTR_CAST(VkPresentModeKHR*, NK_MALLOC(details.presentModeCount * sizeof(VkPresentModeKHR))); 2138 | NK_ASSERT(details.presentModes); 2139 | 2140 | vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, &details.presentModeCount, details.presentModes); 2141 | } 2142 | 2143 | return details; 2144 | } 2145 | 2146 | static NkVkSurfaceSupportDetails nkVkDestroySurfaceSupportDetails(NkVkSurfaceSupportDetails* details) { 2147 | 2148 | NK_ASSERT(details); 2149 | NK_FREE(details->formats); 2150 | NK_FREE(details->presentModes); 2151 | } 2152 | 2153 | static VkPresentModeKHR nkVkChooseSwapPresentMode(VkPresentModeKHR const* availablePresentModes, uint32_t availablePresentModeCount) { 2154 | 2155 | NK_ASSERT(availablePresentModes); 2156 | NK_ASSERT(availablePresentModeCount != 0); 2157 | 2158 | for (size_t i = 0; i < availablePresentModeCount - 1; i++) { 2159 | if (availablePresentModes[i] == VK_PRESENT_MODE_MAILBOX_KHR) { 2160 | return availablePresentModes[i]; 2161 | } 2162 | } 2163 | 2164 | return 
VK_PRESENT_MODE_FIFO_KHR; 2165 | } 2166 | 2167 | static VkSurfaceFormatKHR nkVkChooseSwapSurfaceFormat(VkSurfaceFormatKHR const* availableFormats, uint32_t availableFormatCount) { 2168 | 2169 | NK_ASSERT(availableFormats); 2170 | NK_ASSERT(availableFormatCount != 0); 2171 | 2172 | for (size_t i = 0; i < availableFormatCount - 1; i++) { 2173 | if (availableFormats[i].format == VK_FORMAT_B8G8R8A8_SRGB && availableFormats[i].colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR) { 2174 | return availableFormats[i]; 2175 | } 2176 | } 2177 | 2178 | return availableFormats[0]; 2179 | } 2180 | 2181 | static VkExtent2D nkVkChooseSwapExtent(VkSurfaceCapabilitiesKHR const* capabilities, const NkSwapChainInfo* info) { 2182 | 2183 | NK_ASSERT(capabilities); 2184 | NK_ASSERT(info); 2185 | 2186 | if (capabilities->currentExtent.width != UINT32_MAX) { 2187 | return capabilities->currentExtent; 2188 | } else { 2189 | VkExtent2D actualExtent = { 2190 | info->width, 2191 | info->height 2192 | }; 2193 | 2194 | actualExtent.width 2195 | = NK_MAX(capabilities->minImageExtent.width, NK_MIN(capabilities->maxImageExtent.width, actualExtent.width)); 2196 | 2197 | actualExtent.height 2198 | = NK_MAX(capabilities->minImageExtent.height, NK_MIN(capabilities->maxImageExtent.height, actualExtent.height)); 2199 | 2200 | return actualExtent; 2201 | } 2202 | } 2203 | 2204 | static NkVkQueueFamilyIndices nkVkFindQueueFamilies(VkPhysicalDevice device, VkSurfaceKHR surface) { 2205 | 2206 | NK_ASSERT_VK_HANDLE(device); 2207 | NK_ASSERT_VK_HANDLE(surface); 2208 | 2209 | NkVkQueueFamilyIndices indices; 2210 | { 2211 | indices.graphicsFamily = UINT32_MAX; 2212 | indices.presentFamily = UINT32_MAX; 2213 | } 2214 | 2215 | uint32_t queueFamilyCount = 0; 2216 | vkGetPhysicalDeviceQueueFamilyProperties(device, &queueFamilyCount, NK_NULL); 2217 | 2218 | VkQueueFamilyProperties* queueFamilies = NK_PTR_CAST(VkQueueFamilyProperties*, NK_MALLOC(sizeof(VkQueueFamilyProperties) * queueFamilyCount)); 2219 | NK_ASSERT(queueFamilies); 2220 | 2221 | vkGetPhysicalDeviceQueueFamilyProperties(device, &queueFamilyCount, queueFamilies); 2222 | 2223 | for (uint32_t i = 0; i < queueFamilyCount; i++) { 2224 | 2225 | if (queueFamilies[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) { 2226 | indices.graphicsFamily = i; 2227 | } 2228 | 2229 | if (surface != VK_NULL_HANDLE) { 2230 | 2231 | VkBool32 presentSupport = VK_FALSE; 2232 | NK_CHECK_VK(vkGetPhysicalDeviceSurfaceSupportKHR(device, i, surface, &presentSupport)); 2233 | 2234 | if (presentSupport) { 2235 | indices.presentFamily = i; 2236 | } 2237 | } 2238 | 2239 | if (indices.graphicsFamily != UINT32_MAX && 2240 | indices.presentFamily != UINT32_MAX) { 2241 | break; 2242 | } 2243 | } 2244 | 2245 | NK_FREE(queueFamilies); 2246 | 2247 | return indices; 2248 | } 2249 | 2250 | NkSwapChain nkCreateSwapChain(NkDevice device, NkSurface surface, const NkSwapChainInfo* info) { 2251 | 2252 | NK_ASSERT(device); 2253 | NK_ASSERT(surface); 2254 | NK_ASSERT(info); 2255 | 2256 | NkSwapChain swapChain = NK_PTR_CAST(NkSwapChain, NK_MALLOC(sizeof(struct NkSwapChainImpl))); 2257 | NK_ASSERT(swapChain); 2258 | 2259 | swapChain->currentFrame = 0; 2260 | 2261 | NkVkSurfaceSupportDetails surfaceSupport = nkVkCreateSurfaceSupportDetails(device->physicalDevice, surface->surface); 2262 | VkSurfaceFormatKHR surfaceFormat = nkVkChooseSwapSurfaceFormat(surfaceSupport.formats, surfaceSupport.formatCount); 2263 | VkPresentModeKHR presentMode = nkVkChooseSwapPresentMode(surfaceSupport.presentModes, surfaceSupport.presentModeCount); 2264 | 
VkExtent2D extent = nkVkChooseSwapExtent(&surfaceSupport.capabilities, info); 2265 | 2266 | uint32_t imageCount = surfaceSupport.capabilities.minImageCount + 1; 2267 | if (surfaceSupport.capabilities.maxImageCount > 0 && imageCount > surfaceSupport.capabilities.maxImageCount) { 2268 | imageCount = surfaceSupport.capabilities.maxImageCount; 2269 | } 2270 | 2271 | NkVkQueueFamilyIndices indices = 2272 | nkVkFindQueueFamilies(device->physicalDevice, surface->surface); 2273 | 2274 | uint32_t queueFamilyIndices[] = { indices.graphicsFamily, indices.presentFamily }; 2275 | 2276 | VkSwapchainCreateInfoKHR createInfo; 2277 | { 2278 | createInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR; 2279 | createInfo.flags = 0; 2280 | createInfo.pNext = NK_NULL; 2281 | createInfo.surface = surface->surface; 2282 | createInfo.minImageCount = imageCount; 2283 | createInfo.imageFormat = surfaceFormat.format; 2284 | createInfo.imageColorSpace = surfaceFormat.colorSpace; 2285 | createInfo.imageExtent = extent; 2286 | createInfo.imageArrayLayers = 1; 2287 | createInfo.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; 2288 | 2289 | if (indices.graphicsFamily != indices.presentFamily) { 2290 | createInfo.imageSharingMode = VK_SHARING_MODE_CONCURRENT; 2291 | createInfo.queueFamilyIndexCount = 2; 2292 | createInfo.pQueueFamilyIndices = queueFamilyIndices; 2293 | } 2294 | else { 2295 | createInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; 2296 | createInfo.queueFamilyIndexCount = 0; 2297 | createInfo.pQueueFamilyIndices = NK_NULL; 2298 | } 2299 | 2300 | createInfo.preTransform = surfaceSupport.capabilities.currentTransform; 2301 | createInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR; 2302 | createInfo.presentMode = presentMode; 2303 | createInfo.clipped = VK_TRUE; 2304 | 2305 | createInfo.oldSwapchain = VK_NULL_HANDLE; 2306 | } 2307 | NK_CHECK_VK(vkCreateSwapchainKHR(device->device, &createInfo, NK_NULL, &swapChain->swapChain)); 2308 | 2309 | swapChain->device = device->device; 2310 | 2311 | NK_CHECK_VK(vkGetSwapchainImagesKHR(device->device, swapChain->swapChain, &swapChain->swapChainImageCount, NK_NULL)); 2312 | 2313 | swapChain->swapChainImages = NK_PTR_CAST(VkImage*, NK_MALLOC(sizeof(VkImage) * swapChain->swapChainImageCount)); 2314 | NK_ASSERT(swapChain->swapChainImages); 2315 | 2316 | NK_CHECK_VK(vkGetSwapchainImagesKHR(device->device, swapChain->swapChain, &swapChain->swapChainImageCount, swapChain->swapChainImages)); 2317 | 2318 | nkVkDestroySurfaceSupportDetails(&surfaceSupport); 2319 | 2320 | swapChain->swapChainTextureViews = NK_PTR_CAST(struct NkTextureViewImpl*, NK_MALLOC(sizeof(struct NkTextureViewImpl) * swapChain->swapChainImageCount)); 2321 | NK_ASSERT(swapChain->swapChainTextureViews); 2322 | 2323 | for (size_t i = 0; i < swapChain->swapChainImageCount; i++) { 2324 | VkImageViewCreateInfo imageViewCreateInfo; 2325 | { 2326 | imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; 2327 | imageViewCreateInfo.flags = 0; 2328 | imageViewCreateInfo.pNext = NK_NULL; 2329 | imageViewCreateInfo.image = swapChain->swapChainImages[i]; 2330 | imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D; 2331 | imageViewCreateInfo.format = surfaceFormat.format; 2332 | imageViewCreateInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY; 2333 | imageViewCreateInfo.components.g = VK_COMPONENT_SWIZZLE_IDENTITY; 2334 | imageViewCreateInfo.components.b = VK_COMPONENT_SWIZZLE_IDENTITY; 2335 | imageViewCreateInfo.components.a = VK_COMPONENT_SWIZZLE_IDENTITY; 2336 | 
imageViewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; 2337 | imageViewCreateInfo.subresourceRange.baseMipLevel = 0; 2338 | imageViewCreateInfo.subresourceRange.levelCount = 1; 2339 | imageViewCreateInfo.subresourceRange.baseArrayLayer = 0; 2340 | imageViewCreateInfo.subresourceRange.layerCount = 1; 2341 | } 2342 | NK_CHECK_VK(vkCreateImageView(device->device, &imageViewCreateInfo, NK_NULL, &swapChain->swapChainTextureViews[i].imageView)); 2343 | } 2344 | 2345 | return swapChain; 2346 | } 2347 | 2348 | NkTexture nkCreateTexture(NkDevice device, const NkTextureInfo* descriptor) { 2349 | 2350 | } 2351 | 2352 | NkQueue nkDeviceGetDefaultQueue(NkDevice device) { 2353 | return &device->queue; 2354 | } 2355 | 2356 | NkBool nkDevicePopErrorScope(NkDevice device, NkErrorCallback callback, void* userdata) { 2357 | 2358 | } 2359 | 2360 | void nkDevicePushErrorScope(NkDevice device, NkErrorFilter filter) { 2361 | 2362 | } 2363 | 2364 | void nkDeviceSetDeviceLostCallback(NkDevice device, NkDeviceLostCallback callback, void* userdata) { 2365 | 2366 | } 2367 | 2368 | void nkDeviceSetUncapturedErrorCallback(NkDevice device, NkErrorCallback callback, void* userdata) { 2369 | 2370 | } 2371 | 2372 | // Methods of Fence 2373 | void nkDeviceFence(NkFence fence) { 2374 | 2375 | } 2376 | 2377 | uint64_t nkFenceGetCompletedValue(NkFence fence) { 2378 | 2379 | } 2380 | 2381 | void nkFenceOnCompletion(NkFence fence, uint64_t value, NkFenceOnCompletionCallback callback, void* userdata) { 2382 | 2383 | } 2384 | 2385 | // Methods of Instance 2386 | void nkDestroyInstance(NkInstance instance) { 2387 | 2388 | NK_ASSERT(instance); 2389 | 2390 | if (NkEnableValidationLayers) { 2391 | nkVkDestroyDebugUtilsMessengerEXT(instance->instance, instance->debugMessenger, NK_NULL); 2392 | } 2393 | vkDestroyInstance(instance->instance, NK_NULL); 2394 | NK_FREE(instance); 2395 | } 2396 | 2397 | NkSurface nkCreateSurface(NkInstance instance, const NkSurfaceInfo* descriptor) { 2398 | 2399 | NK_ASSERT(instance); 2400 | NK_ASSERT(descriptor); 2401 | 2402 | NkSurface surface = NK_PTR_CAST(NkSurface, NK_MALLOC(sizeof(struct NkSurfaceImpl))); 2403 | NK_ASSERT(surface); 2404 | 2405 | surface->instance = instance->instance; 2406 | 2407 | #if defined(_WIN32) 2408 | VkWin32SurfaceCreateInfoKHR createInfo; 2409 | { 2410 | createInfo.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR; 2411 | createInfo.pNext = NK_NULL; 2412 | createInfo.flags = 0; 2413 | createInfo.hwnd = descriptor->native.hwnd; 2414 | createInfo.hinstance = descriptor->native.hinstance; 2415 | } 2416 | NK_CHECK_VK(vkCreateWin32SurfaceKHR(instance->instance, &createInfo, NK_NULL, &surface->surface)); 2417 | #endif 2418 | 2419 | return surface; 2420 | } 2421 | 2422 | static const char* NkVkDeviceEnabledExtensionNames[] = { 2423 | VK_KHR_SWAPCHAIN_EXTENSION_NAME, 2424 | }; 2425 | 2426 | static const uint32_t NkVkDeviceEnabledExtensionCount 2427 | = sizeof(NkVkDeviceEnabledExtensionNames) / sizeof(NkVkDeviceEnabledExtensionNames[0]); 2428 | 2429 | static NkBool nkVkCheckDeviceExtensionProperties(VkExtensionProperties* properties, uint32_t propertyCount) { 2430 | 2431 | NK_ASSERT(properties); 2432 | NK_ASSERT(propertyCount != 0); 2433 | 2434 | for (uint32_t i = 0; i < NkVkDeviceEnabledExtensionCount; i++) { 2435 | uint32_t enabledExtensionCount = 0; 2436 | for (uint32_t j = 0; j < propertyCount - 1; j++) { 2437 | if (strcmp(NkVkDeviceEnabledExtensionNames[i], properties[j].extensionName) == 0) { 2438 | ++enabledExtensionCount; 2439 | } 2440 | if 
(enabledExtensionCount == NkVkDeviceEnabledExtensionCount) { 2441 | return NkTrue; 2442 | } 2443 | } 2444 | } 2445 | return NkFalse; 2446 | } 2447 | 2448 | static NkBool nkVkIsPhysicalDeviceSuitable(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface) { 2449 | 2450 | NK_ASSERT_VK_HANDLE(physicalDevice); 2451 | NK_ASSERT_VK_HANDLE(surface); 2452 | 2453 | uint32_t propertyCount = 0; 2454 | 2455 | NK_CHECK_VK(vkEnumerateDeviceExtensionProperties(physicalDevice, NK_NULL, &propertyCount, NK_NULL)); 2456 | 2457 | VkExtensionProperties* properties = NK_PTR_CAST(VkExtensionProperties*, NK_MALLOC(sizeof(VkExtensionProperties) * propertyCount)); 2458 | NK_ASSERT(properties); 2459 | 2460 | NK_CHECK_VK(vkEnumerateDeviceExtensionProperties(physicalDevice, NK_NULL, &propertyCount, properties)); 2461 | 2462 | NkVkQueueFamilyIndices queueFamilyIndices = nkVkFindQueueFamilies(physicalDevice, surface); 2463 | 2464 | NkBool indicesIsComplete = queueFamilyIndices.graphicsFamily != UINT32_MAX && 2465 | queueFamilyIndices.presentFamily != UINT32_MAX; 2466 | 2467 | NkBool extensionsSupported = nkVkCheckDeviceExtensionProperties(properties, propertyCount); 2468 | 2469 | NkBool surfaceAdequate = NkFalse; 2470 | if (extensionsSupported) { 2471 | NkVkSurfaceSupportDetails details = nkVkCreateSurfaceSupportDetails(physicalDevice, surface); 2472 | surfaceAdequate = details.formatCount != 0 && details.presentModeCount != 0; 2473 | nkVkDestroySurfaceSupportDetails(&details); 2474 | } 2475 | 2476 | NK_FREE(properties); 2477 | 2478 | return indicesIsComplete && extensionsSupported && surfaceAdequate; 2479 | } 2480 | 2481 | NkDevice nkCreateDevice(NkInstance instance, NkSurface surface) { 2482 | 2483 | NK_ASSERT(instance); 2484 | NK_ASSERT(surface); 2485 | 2486 | NkDevice device = NK_PTR_CAST(NkDevice, NK_MALLOC(sizeof(struct NkDeviceImpl))); 2487 | NK_ASSERT(device); 2488 | 2489 | device->instance = instance; 2490 | 2491 | // select physical device 2492 | 2493 | uint32_t physicalDeviceCount = 0; 2494 | NK_CHECK_VK(vkEnumeratePhysicalDevices(instance->instance, &physicalDeviceCount, NK_NULL)); 2495 | 2496 | VkPhysicalDevice* physicalDevices = 2497 | NK_PTR_CAST(VkPhysicalDevice*, NK_MALLOC(sizeof(VkPhysicalDevice) * physicalDeviceCount)); 2498 | NK_ASSERT(physicalDevices); 2499 | 2500 | vkEnumeratePhysicalDevices(instance->instance, &physicalDeviceCount, physicalDevices); 2501 | 2502 | VkSurfaceKHR vkSurface = surface->surface; 2503 | 2504 | device->physicalDevice = VK_NULL_HANDLE; 2505 | for (size_t i = 0; i < physicalDeviceCount; i++) { 2506 | if (nkVkIsPhysicalDeviceSuitable(physicalDevices[i], vkSurface)) { 2507 | device->physicalDevice = physicalDevices[i]; 2508 | break; 2509 | } 2510 | } 2511 | NK_ASSERT_VK_HANDLE(device->physicalDevice); 2512 | 2513 | NK_FREE(physicalDevices); 2514 | 2515 | // select logical device 2516 | 2517 | NkVkQueueFamilyIndices queueFamilyIndices = 2518 | nkVkFindQueueFamilies(device->physicalDevice, vkSurface); 2519 | 2520 | VkDeviceQueueCreateInfo queueCreateInfos[2]; 2521 | uint32_t queueCount = 0; 2522 | float queuePriority = 1.0f; 2523 | 2524 | for (size_t i = 0; i < 2; i++) { 2525 | VkDeviceQueueCreateInfo info; 2526 | { 2527 | info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; 2528 | info.pNext = NK_NULL; 2529 | info.flags = 0; 2530 | info.queueFamilyIndex = *(&queueFamilyIndices.graphicsFamily + i); 2531 | info.queueCount = 1; 2532 | info.pQueuePriorities = &queuePriority; 2533 | } 2534 | queueCreateInfos[i] = info; 2535 | 2536 | queueCount = i + 1; 2537 | 2538 | if 
(queueFamilyIndices.graphicsFamily == queueFamilyIndices.presentFamily) { 2539 | break; 2540 | } 2541 | } 2542 | 2543 | VkDeviceCreateInfo createInfo; 2544 | { 2545 | createInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; 2546 | createInfo.pNext = NK_NULL; 2547 | createInfo.flags = 0; 2548 | createInfo.queueCreateInfoCount = queueCount; 2549 | createInfo.pQueueCreateInfos = queueCreateInfos; 2550 | createInfo.enabledLayerCount = 0; // deprecated and ignored 2551 | createInfo.ppEnabledLayerNames = NK_NULL; // deprecated and ignored 2552 | createInfo.enabledExtensionCount = NkVkDeviceEnabledExtensionCount; 2553 | createInfo.ppEnabledExtensionNames = NkVkDeviceEnabledExtensionNames; 2554 | createInfo.pEnabledFeatures = NK_NULL; 2555 | } 2556 | 2557 | NK_CHECK_VK(vkCreateDevice(device->physicalDevice, &createInfo, NK_NULL, &device->device)); 2558 | 2559 | vkGetDeviceQueue(device->device, queueFamilyIndices.graphicsFamily, 0, &device->queue.queue); 2560 | 2561 | return device; 2562 | } 2563 | 2564 | // Methods of QuerySet 2565 | void nkDestroyQuerySet(NkQuerySet querySet) { 2566 | 2567 | } 2568 | 2569 | // Methods of Queue 2570 | void nkDestroyQueue(NkQueue queue) { 2571 | 2572 | } 2573 | 2574 | NkFence nkCreateFence(NkQueue queue, const NkFenceInfo* descriptor) { 2575 | 2576 | } 2577 | 2578 | void nkQueueSignal(NkQueue queue, NkFence fence, uint64_t signalValue) { 2579 | 2580 | } 2581 | 2582 | void nkQueueSubmit(NkQueue queue, uint32_t commandCount, const NkCommandBuffer* commands) { 2583 | 2584 | } 2585 | 2586 | void nkQueueWriteBuffer(NkQueue queue, NkBuffer buffer, uint64_t bufferOffset, const void* data, size_t size) { 2587 | 2588 | } 2589 | 2590 | void nkQueueWriteTexture(NkQueue queue, const NkTextureCopyView* destination, const void* data, size_t dataSize, const NkTextureDataLayout* dataLayout, const NkExtent3D* writeSize) { 2591 | 2592 | } 2593 | 2594 | // Methods of RenderBundleEncoder 2595 | void nkRenderBundleEncoderDraw(NkRenderBundleEncoder renderBundleEncoder, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) { 2596 | 2597 | } 2598 | 2599 | void nkRenderBundleEncoderDrawIndexed(NkRenderBundleEncoder renderBundleEncoder, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t baseVertex, uint32_t firstInstance) { 2600 | 2601 | } 2602 | 2603 | void nkRenderBundleEncoderDrawIndexedIndirect(NkRenderBundleEncoder renderBundleEncoder, NkBuffer indirectBuffer, uint64_t indirectOffset) { 2604 | 2605 | } 2606 | 2607 | void nkRenderBundleEncoderDrawIndirect(NkRenderBundleEncoder renderBundleEncoder, NkBuffer indirectBuffer, uint64_t indirectOffset) { 2608 | 2609 | } 2610 | 2611 | NkRenderBundle nkRenderBundleEncoderFinish(NkRenderBundleEncoder renderBundleEncoder) { 2612 | 2613 | } 2614 | 2615 | void nkRenderBundleEncoderInsertDebugMarker(NkRenderBundleEncoder renderBundleEncoder, const char* markerLabel) { 2616 | 2617 | } 2618 | 2619 | void nkRenderBundleEncoderPopDebugGroup(NkRenderBundleEncoder renderBundleEncoder) { 2620 | 2621 | } 2622 | 2623 | void nkRenderBundleEncoderPushDebugGroup(NkRenderBundleEncoder renderBundleEncoder, const char* groupLabel) { 2624 | 2625 | } 2626 | 2627 | void nkRenderBundleEncoderSetBindGroup(NkRenderBundleEncoder renderBundleEncoder, uint32_t groupIndex, NkBindGroup group, uint32_t dynamicOffsetCount, const uint32_t* dynamicOffsets) { 2628 | 2629 | } 2630 | 2631 | void nkRenderBundleEncoderSetIndexBuffer(NkRenderBundleEncoder renderBundleEncoder, NkBuffer buffer, NkIndexFormat format, uint64_t 
offset, uint64_t size) { 2632 | 2633 | } 2634 | 2635 | void nkRenderBundleEncoderSetPipeline(NkRenderBundleEncoder renderBundleEncoder, NkRenderPipeline pipeline) { 2636 | 2637 | } 2638 | 2639 | void nkRenderBundleEncoderSetVertexBuffer(NkRenderBundleEncoder renderBundleEncoder, uint32_t slot, NkBuffer buffer, uint64_t offset, uint64_t size) { 2640 | 2641 | } 2642 | 2643 | // Methods of RenderPipeline 2644 | void nkDestroyRenderPipeline(NkRenderPipeline renderPipeline) { 2645 | 2646 | NK_ASSERT(renderPipeline); 2647 | 2648 | NK_FREE(renderPipeline); 2649 | } 2650 | 2651 | NkBindGroupLayout nkRenderPipelineGetBindGroupLayout(NkRenderPipeline renderPipeline, uint32_t groupIndex) { 2652 | 2653 | } 2654 | 2655 | // Methods of Surface 2656 | void nkDestroySurface(NkSurface surface) { 2657 | 2658 | NK_ASSERT(surface); 2659 | 2660 | vkDestroySurfaceKHR(surface->instance, surface->surface, NK_NULL); 2661 | NK_FREE(surface); 2662 | } 2663 | 2664 | // Methods of SwapChain 2665 | void nkDestroySwapChain(NkSwapChain swapChain) { 2666 | 2667 | NK_ASSERT(swapChain); 2668 | 2669 | for (uint32_t i = 0; i < swapChain->swapChainImageCount; i++) { 2670 | vkDestroyImageView(swapChain->device, swapChain->swapChainTextureViews[i].imageView, NK_NULL); 2671 | } 2672 | NK_FREE(swapChain->swapChainTextureViews); 2673 | vkDestroySwapchainKHR(swapChain->device, swapChain->swapChain, NK_NULL); 2674 | NK_FREE(swapChain->swapChainImages); 2675 | NK_FREE(swapChain); 2676 | } 2677 | 2678 | NkTextureView nkSwapChainGetCurrentTextureView(NkSwapChain swapChain) { 2679 | 2680 | NK_ASSERT(swapChain); 2681 | NK_ASSERT(swapChain->swapChainTextureViews); 2682 | return &swapChain->swapChainTextureViews[swapChain->currentFrame]; 2683 | } 2684 | 2685 | void nkSwapChainPresent(NkSwapChain swapChain) { 2686 | 2687 | } 2688 | 2689 | // Methods of Texture 2690 | NkTextureView nkCreateTextureView(NkTexture texture, const NkTextureViewInfo* descriptor) { 2691 | 2692 | } 2693 | 2694 | void nkDestroyTexture(NkTexture texture) { 2695 | 2696 | } 2697 | 2698 | #endif // NK_VULKAN_IMPLEMENTATION 2699 | 2700 | #endif // NK_IMPLEMENTATION 2701 | 2702 | #endif // NEKO_H 2703 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | zlib/libpng license 2 | 3 | Copyright (c) 2020 Stuart Adams 4 | 5 | This software is provided 'as-is', without any express or implied warranty. 6 | In no event will the authors be held liable for any damages arising from the 7 | use of this software. 8 | 9 | Permission is granted to anyone to use this software for any purpose, 10 | including commercial applications, and to alter it and redistribute it 11 | freely, subject to the following restrictions: 12 | 13 | 1. The origin of this software must not be misrepresented; you must not 14 | claim that you wrote the original software. If you use this software in a 15 | product, an acknowledgment in the product documentation would be 16 | appreciated but is not required. 17 | 18 | 2. Altered source versions must be plainly marked as such, and must not 19 | be misrepresented as being the original software. 20 | 21 | 3. This notice may not be removed or altered from any source 22 | distribution. 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 🐾 Neko 2 | 3 |

4 | <p align="center"><img src="Resources/neko.png" alt="Neko"></p> 5 |

6 | 7 | [![License](https://img.shields.io/github/license/nyalloc/neko)](https://github.com/nyalloc/neko/blob/main/LICENSE) 8 | 9 | ## 🎮 A New Graphics API 10 | 11 | Neko is an early-stage 3D graphics API wrapper. It is implemented as a dependency-free [STB-style](https://github.com/nothings/stb/blob/master/docs/stb_howto.txt) header-only C library. It will sit on top of Vulkan, D3D12 and Metal and define a higher-level WebGPU-style interface. The intended users are developers who want to prototype or implement small game engines. Neko will bring the most important capabilities of modern low-level graphics APIs, but restore the joy of working with high-level APIs. Neko is influenced by the development of WebGPU and will develop alongside it. 12 | 13 | ## 🔬 Why C? 14 | * Easier integration with other languages 15 | * Easier integration into other projects 16 | * Adds only minimal size overhead to executables 17 | 18 | ## 🛠️ Current Work 19 | 20 | The Vulkan backend is currently under development. Vulkan is the most portable backend to target, so it is a worthwhile starting point. Development work is currently focused on Windows. Once the Vulkan backend is in good shape, development will shift to introduce Linux support while introducing testing and continuous integration. 21 | 22 | [![Twitter](https://img.shields.io/twitter/follow/nyalloc?label=follow)](https://twitter.com/intent/user?screen_name=nyalloc) 23 | [![GitHub](https://img.shields.io/github/followers/nyalloc?label=follow&style=social)](https://github.com/nyalloc) 24 | [![Nyalloc](https://img.shields.io/badge/nyalloc-blog-ff69b4?style=flat)](https://nyalloc.io) 25 | -------------------------------------------------------------------------------- /Resources/neko.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nyalloc/Neko/dda220509bb1802a89e3a3001696a177ac1e74a5/Resources/neko.png -------------------------------------------------------------------------------- /Samples/01_Triangle/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 2 | add_executable(01_Triangle Triangle.c) 3 | 4 | add_shader_module(01_Triangle 5 | NAME TriangleVertex 6 | INPUT Triangle.hlsl 7 | ENTRY_POINT vs_main 8 | STAGE vs 9 | ) 10 | 11 | add_shader_module(01_Triangle 12 | NAME TriangleFragment 13 | INPUT Triangle.hlsl 14 | ENTRY_POINT ps_main 15 | STAGE ps 16 | ) 17 | 18 | target_link_libraries(01_Triangle PUBLIC 19 | SampleBase 20 | ) 21 | 22 | set_neko_compiler_options(01_Triangle) 23 | -------------------------------------------------------------------------------- /Samples/01_Triangle/Triangle.c: -------------------------------------------------------------------------------- 1 |  2 | #include 3 | #include 4 | #include 5 | #include 6 | 7 | typedef struct NkPositionColorVertex { 8 | NkFloat3 position; 9 | NkFloat4 color; 10 | } NkPositionColorVertex; 11 | 12 | static const NkPositionColorVertex vertices[] = { 13 | { .position = { 0.0f, 0.5f, 0.5f }, .color = { 1.0f, 0.0f, 0.0f, 1.0f } }, 14 | { .position = { 0.5f,-0.5f, 0.5f }, .color = { 0.0f, 1.0f, 0.0f, 1.0f } }, 15 | { .position = {-0.5f,-0.5f, 0.5f }, .color = { 0.0f, 0.0f, 1.0f, 1.0f } } 16 | }; 17 | 18 | int main() { 19 | 20 | const uint32_t windowHeight = 720; 21 | const uint32_t windowWidth = 1280; 22 | 23 | const NkSampleApp sample = nkCreateSampleApp(&(NkSampleAppInfo) { 24 | .width = windowWidth, 25 | .height = windowHeight, 26 | .title = "Neko: Triangle", 27 | }); 28 | 29 | 
const NkInstance instance = nkCreateInstance(); 30 | 31 | const NkNativeSurface nativeSurface = nkSampleAppGetNativeSurface(sample); 32 | 33 | const NkSurface surface = nkCreateSurface(instance, &(NkSurfaceInfo) { 34 | .native = nativeSurface 35 | }); 36 | 37 | const NkDevice device = nkCreateDevice(instance, surface); 38 | 39 | const NkQueue queue = nkDeviceGetDefaultQueue(device); 40 | 41 | const NkShaderModule vertexShader = nkCreateShaderModule(device, &(NkShaderModuleInfo) { 42 | .source = NkTriangleVertexSource, 43 | .size = NkTriangleVertexSourceSize 44 | }); 45 | 46 | const NkShaderModule pixelShader = nkCreateShaderModule(device, &(NkShaderModuleInfo) { 47 | .source = NkTriangleFragmentSource, 48 | .size = NkTriangleFragmentSourceSize 49 | }); 50 | 51 | const NkRenderPipeline renderPipeline = nkCreateRenderPipeline(device, &(NkRenderPipelineInfo) { 52 | .vertexStage = { .module = vertexShader, .entryPoint = "vs_main" }, 53 | .fragmentStage = { .module = pixelShader, .entryPoint = "ps_main" }, 54 | .primitiveTopology = NkPrimitiveTopology_TriangleList, 55 | .vertexState = &(NkVertexStateInfo) { 56 | .vertexBuffers = &(NkVertexBufferLayoutInfo) { 57 | .arrayStride = sizeof(NkPositionColorVertex), 58 | .stepMode = NkInputStepMode_Vertex, 59 | .attributes = (NkVertexAttributeInfo[]) { 60 | { .format = NkVertexFormat_Float3, .offset = 0, .shaderLocation = 0 }, 61 | { .format = NkVertexFormat_Float4, .offset = sizeof(NkFloat3), .shaderLocation = 1 } 62 | }, 63 | .attributeCount = 2 64 | }, 65 | .vertexBufferCount = 1 66 | } 67 | }); 68 | 69 | const NkBuffer vertexBuffer = nkCreateBuffer(device, &(NkBufferInfo) { 70 | .usage = NkBufferUsage_CopyDst | NkBufferUsage_Vertex, 71 | .size = sizeof vertices 72 | }); 73 | 74 | nkQueueWriteBuffer(queue, vertexBuffer, 0, vertices, sizeof vertices); 75 | 76 | const NkSwapChain swapChain = nkCreateSwapChain(device, surface, &(NkSwapChainInfo) { 77 | .width = windowWidth, 78 | .height = windowHeight 79 | }); 80 | 81 | while (nkSampleAppProcessEvents(sample, NULL)) { 82 | const NkTextureView frame = nkSwapChainGetCurrentTextureView(swapChain); 83 | 84 | const NkCommandEncoder encoder = nkCreateCommandEncoder(device); 85 | 86 | const NkRenderPassEncoder renderPass = nkCommandEncoderBeginRenderPass(encoder, &(NkRenderPassInfo) { 87 | .colorAttachments = &(NkRenderPassColorAttachmentInfo) { 88 | .attachment = frame, 89 | .loadOp = NkLoadOp_Clear, 90 | .storeOp = NkStoreOp_Store, 91 | .clearColor = { 0.0f, 0.0f, 0.0f, 1.0f } 92 | }, 93 | .colorAttachmentCount = 1 94 | }); 95 | 96 | nkRenderPassEncoderSetPipeline(renderPass, renderPipeline); 97 | nkRenderPassEncoderSetVertexBuffer(renderPass, 0, vertexBuffer, 0, 0); 98 | nkRenderPassEncoderDraw(renderPass, 3, 1, 0, 0); 99 | 100 | const NkCommandBuffer commandBuffer = nkCommandEncoderFinish(encoder); 101 | nkQueueSubmit(queue, 1, &commandBuffer); 102 | } 103 | 104 | nkDestroyBuffer(vertexBuffer); 105 | nkDestroySwapChain(swapChain); 106 | nkDestroyRenderPipeline(renderPipeline); 107 | nkDestroyShaderModule(vertexShader); 108 | nkDestroyShaderModule(pixelShader); 109 | nkDestroyDevice(device); 110 | nkDestroySurface(surface); 111 | nkDestroyInstance(instance); 112 | 113 | nkDestroySampleApp(sample); 114 | } 115 | -------------------------------------------------------------------------------- /Samples/01_Triangle/Triangle.hlsl: -------------------------------------------------------------------------------- 1 | 2 | /* vertex attributes go here to input to the vertex shader */ 3 | struct vs_in { 4 | 
4 |     float3 position_local : POS;
5 | };
6 |
7 | /* outputs from vertex shader go here. can be interpolated to pixel shader */
8 | struct vs_out {
9 |     float4 position_clip : SV_POSITION; // required output of VS
10 | };
11 |
12 | vs_out vs_main(vs_in input) {
13 |     vs_out output = (vs_out)0; // zero the memory first
14 |     output.position_clip = float4(input.position_local, 1.0);
15 |     return output;
16 | }
17 |
18 | float4 ps_main(vs_out input) : SV_TARGET {
19 |     return float4( 1.0, 0.0, 1.0, 1.0 ); // must return an RGBA colour
20 | }
--------------------------------------------------------------------------------
/Samples/02_Cubes/CMakeLists.txt:
--------------------------------------------------------------------------------
1 |
2 | add_executable(02_Cubes "main.c")
3 | target_link_libraries(02_Cubes PUBLIC SampleBase)
4 |
--------------------------------------------------------------------------------
/Samples/02_Cubes/main.c:
--------------------------------------------------------------------------------
1 |
2 | #include <Neko/Neko.h>
3 | #include <Neko/Sample.h>
4 |
5 | int main()
6 | {
7 |
8 | }
9 |
--------------------------------------------------------------------------------
/Samples/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 3.15)
2 |
3 | project(NekoSamples C)
4 |
5 | list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/Modules")
6 | include(ShaderCompiler)
7 |
8 | function(set_neko_compiler_options target)
9 |     if(MSVC)
10 |         target_compile_options(${target} PRIVATE /W4 /WX)
11 |     else()
12 |         target_compile_options(${target} PRIVATE -Wall -Wextra -pedantic -Werror)
13 |     endif()
14 | endfunction()
15 |
16 | add_subdirectory(ShaderConductor)
17 | add_subdirectory(Neko)
18 | add_subdirectory(SampleBase)
19 | add_subdirectory(01_Triangle)
20 | add_subdirectory(02_Cubes)
21 |
--------------------------------------------------------------------------------
/Samples/Modules/ShaderCompiler.cmake:
--------------------------------------------------------------------------------
1 | if(NEKO_GENERATE_MODE)
2 |     function(WRAP_STRING)
3 |         set(oneValueArgs VARIABLE AT_COLUMN)
4 |         cmake_parse_arguments(WRAP_STRING "${options}" "${oneValueArgs}" "" ${ARGN})
5 |
6 |         string(LENGTH ${${WRAP_STRING_VARIABLE}} stringLength)
7 |         math(EXPR offset "0")
8 |
9 |         while(stringLength GREATER 0)
10 |
11 |             if(stringLength GREATER ${WRAP_STRING_AT_COLUMN})
12 |                 math(EXPR length "${WRAP_STRING_AT_COLUMN}")
13 |             else()
14 |                 math(EXPR length "${stringLength}")
15 |             endif()
16 |
17 |             string(SUBSTRING ${${WRAP_STRING_VARIABLE}} ${offset} ${length} line)
18 |             set(lines "${lines}\n${line}")
19 |
20 |             math(EXPR stringLength "${stringLength} - ${length}")
21 |             math(EXPR offset "${offset} + ${length}")
22 |         endwhile()
23 |
24 |         set(${WRAP_STRING_VARIABLE} "${lines}" PARENT_SCOPE)
25 |     endfunction()
26 |
27 |     file(READ ${INPUT_FILE} hexString HEX)
28 |     string(LENGTH ${hexString} hexStringLength)
29 |
30 |     # wraps the hex string into multiple lines at column 32 (i.e. 16 bytes per line)
31 |     wrap_string(VARIABLE hexString AT_COLUMN 32)
32 |     math(EXPR arraySize "${hexStringLength} / 2")
33 |
34 |     # adds '0x' prefix and comma suffix before and after every byte respectively
35 |     string(REGEX REPLACE "([0-9a-f][0-9a-f])" "0x\\1, " arrayValues ${hexString})
36 |
37 |     # removes trailing comma
38 |     string(REGEX REPLACE ", $" "" arrayValues ${arrayValues})
39 |
40 |     string(MAKE_C_IDENTIFIER "${SYMBOL}" SYMBOL)
41 |
42 |     set(pragmaOnce "#pragma once")
43 |     set(includeDirs "#include <Neko/Neko.h>")
44 |     set(warning "// Warning: this file was generated by ShaderCompiler.cmake. Do not modify it!")
45 |     set(arrayComment "// This header contains the ${TARGET} binary generated from ${SOURCE_FILE}")
46 |     set(arrayDefinition "const uint8_t NK_ALIGN_AS(4) ${NAMESPACE}${SYMBOL}Source[] = { ${arrayValues} };")
47 |     set(arraySizeDefinition "const uint32_t ${NAMESPACE}${SYMBOL}SourceSize = ${arraySize};")
48 |
49 |     set(declarations "${pragmaOnce}\n\n${includeDirs}\n\n${warning}\n${arrayComment}\n\n${arrayDefinition}\n\n${arraySizeDefinition}\n\n")
50 |
51 |     file(WRITE ${OUTPUT_FILE} "${declarations}")
52 |
53 |     return()
54 | endif()
55 |
56 | set(NEKO_SHADER_SCRIPT "${CMAKE_CURRENT_LIST_FILE}" CACHE INTERNAL "Path to ShaderCompiler script")
57 |
58 | function(add_shader_module name)
59 |     set(options)
60 |     set(oneValueArgs NAME INPUT ENTRY_POINT STAGE)
61 |     set(multiValueArgs)
62 |     cmake_parse_arguments(
63 |         SHADER
64 |         "${options}"
65 |         "${oneValueArgs}"
66 |         "${multiValueArgs}"
67 |         ${ARGN}
68 |     )
69 |
70 |     get_filename_component(ShaderInput "${CMAKE_CURRENT_SOURCE_DIR}/${SHADER_INPUT}" ABSOLUTE)
71 |     get_filename_component(ShaderHeader "${CMAKE_CURRENT_BINARY_DIR}/Neko/Shaders/${SHADER_NAME}.h" ABSOLUTE)
72 |     get_filename_component(ShaderBin "${CMAKE_CURRENT_BINARY_DIR}/Neko/Shaders/${SHADER_NAME}.bin" ABSOLUTE)
73 |
74 |     set_source_files_properties(${SHADER_INPUT} PROPERTIES VS_TOOL_OVERRIDE "None")
75 |
76 |     add_custom_target("${SHADER_NAME}" ALL
77 |         ${CMAKE_COMMAND} -E echo "Compiling shaders..."
78 |         DEPENDS ${ShaderHeader}
79 |     )
80 |
81 |     # TODO: change target depending on backend. Vulkan only for now.
82 |     set(TARGET spirv)
83 |
84 |     add_custom_command(
85 |         OUTPUT ${ShaderHeader}
86 |         COMMAND ${CMAKE_COMMAND} -E echo "Compiling shader ${ShaderInput}. Generating shader binary header: ${ShaderHeader}"
87 |         COMMAND ShaderConductor -I ${ShaderInput} -O ${ShaderBin} -S ${SHADER_STAGE} -E ${SHADER_ENTRY_POINT} -T ${TARGET}
88 |         COMMAND ${CMAKE_COMMAND} -DNEKO_GENERATE_MODE=TRUE -DNAMESPACE="Nk" -DTARGET=${TARGET} -DSOURCE_FILE=${ShaderInput}
89 |                 -DSYMBOL=${SHADER_NAME} -DINPUT_FILE=${ShaderBin} -DOUTPUT_FILE=${ShaderHeader}
90 |                 -P ${NEKO_SHADER_SCRIPT}
91 |         COMMAND ${CMAKE_COMMAND} -E remove ${ShaderBin}
92 |         DEPENDS ${SHADER_INPUT}
93 |     )
94 |
95 |     target_sources(${name} PRIVATE ${SHADER_INPUT} ${ShaderHeader})
96 |     target_include_directories(${name} PUBLIC ${CMAKE_CURRENT_BINARY_DIR})
97 | endfunction()
98 |
--------------------------------------------------------------------------------
/Samples/Neko/CMakeLists.txt:
--------------------------------------------------------------------------------
1 |
2 | add_library(Neko STATIC "Source/Neko.c")
3 | target_include_directories(Neko PUBLIC ../../Include)
4 |
5 | find_package(Vulkan REQUIRED)
6 | target_link_libraries(Neko PRIVATE Vulkan::Vulkan)
7 | target_compile_definitions(Neko PUBLIC NK_VULKAN_IMPLEMENTATION)
8 |
--------------------------------------------------------------------------------
/Samples/Neko/Source/Neko.c:
--------------------------------------------------------------------------------
1 | #define NK_IMPLEMENTATION
2 | #include <Neko/Neko.h>
3 |
--------------------------------------------------------------------------------
/Samples/SampleBase/CMakeLists.txt:
--------------------------------------------------------------------------------
1 |
2 | add_library(SampleBase)
3 | target_include_directories(SampleBase PUBLIC "Include" PRIVATE "Source")
4 | target_compile_definitions(SampleBase PRIVATE SDL_MAIN_HANDLED)
5 | find_package(SDL2 CONFIG REQUIRED)
6 | target_link_libraries(SampleBase PRIVATE SDL2::SDL2 SDL2::SDL2main)
7 |
8 | target_sources(SampleBase PRIVATE "Source/SampleBase.c")
9 | target_link_libraries(SampleBase PUBLIC Neko)
10 |
--------------------------------------------------------------------------------
/Samples/SampleBase/Include/Neko/Sample.h:
--------------------------------------------------------------------------------
1 | #ifndef NK_SAMPLE_WINDOW_INCLUDE_GUARD
2 | #define NK_SAMPLE_WINDOW_INCLUDE_GUARD
3 |
4 | #include <stdint.h>
5 |
6 | #ifdef __cplusplus
7 | extern "C" {
8 | #endif
9 |
10 | typedef struct NkSampleApp_* NkSampleApp;
11 | typedef struct NkSampleAppInfo {
12 |     uint32_t width;
13 |     uint32_t height;
14 |     const char* title;
15 | } NkSampleAppInfo;
16 |
17 | typedef struct NkSampleAppResize {
18 |     uint32_t width;
19 |     uint32_t height;
20 | } NkSampleAppResize;
21 |
22 | typedef struct NkSampleAppState {
23 |     const NkSampleAppResize* resize;
24 | } NkSampleAppState;
25 |
26 | typedef struct NkNativeSurface NkNativeSurface;
27 | typedef enum NkBool NkBool;
28 | typedef enum NkResult NkResult;
29 |
30 | NkSampleApp nkCreateSampleApp(const NkSampleAppInfo* desc);
31 | void nkDestroySampleApp(NkSampleApp sample);
32 |
33 | NkBool nkSampleAppProcessEvents(NkSampleApp sample, NkSampleAppState* state);
34 | NkNativeSurface nkSampleAppGetNativeSurface(NkSampleApp sample);
35 |
36 | #ifdef __cplusplus
37 | }
38 | #endif
39 |
40 | #endif /* NK_SAMPLE_WINDOW_INCLUDE_GUARD */
41 |
--------------------------------------------------------------------------------
/Samples/SampleBase/Source/SampleBase.c:
--------------------------------------------------------------------------------
1 | #include
2 | #include
3 | #include
4 | #include
5 | #include
6 |
7 | struct NkSampleApp_ {
8 |     SDL_Window* window;
9 |     NkSampleAppState state;
10 |     NkSampleAppResize resize;
11 |     SDL_Event event;
12 | };
13 |
14 | NkSampleApp nkCreateSampleApp(const NkSampleAppInfo* desc) {
15 |     struct NkSampleApp_* newSample = (NkSampleApp)malloc(sizeof(struct NkSampleApp_));
16 |     assert(newSample);
17 |
18 |     Uint32 flags = 0;
19 |
20 | #ifdef NK_BACKEND_VULKAN
21 |     flags |= SDL_WINDOW_VULKAN;
22 | #endif
23 |
24 |     newSample->window = SDL_CreateWindow(
25 |         desc->title,
26 |         SDL_WINDOWPOS_CENTERED,
27 |         SDL_WINDOWPOS_CENTERED,
28 |         desc->width,
29 |         desc->height,
30 |         flags
31 |     );
32 |
33 |     assert(newSample->window);
34 |
35 |     return newSample;
36 | }
37 |
38 | void nkDestroySampleApp(NkSampleApp sample) {
39 |     SDL_DestroyWindow(sample->window);
40 |     free(sample);
41 | }
42 |
43 | NkBool nkSampleAppProcessEvents(NkSampleApp sample, NkSampleAppState* state) {
44 |     sample->state.resize = NULL;
45 |
46 |     while (SDL_PollEvent(&sample->event)) {
47 |         if (sample->event.type == SDL_QUIT) {
48 |             return NkFalse;
49 |         }
50 |     }
51 |
52 |     if (state) {
53 |         *state = sample->state;
54 |     }
55 |
56 |     return NkTrue;
57 | }
58 |
59 | NkNativeSurface nkSampleAppGetNativeSurface(NkSampleApp sample) {
60 |     NkNativeSurface nativeSurface;
61 |
62 |     SDL_SysWMinfo wmInfo;
63 |     SDL_VERSION(&wmInfo.version);
64 |     SDL_GetWindowWMInfo(sample->window, &wmInfo);
65 |     nativeSurface.hwnd = wmInfo.info.win.window;
66 |     nativeSurface.hinstance = wmInfo.info.win.hinstance;
67 |
68 |     return nativeSurface;
69 | }
70 |
--------------------------------------------------------------------------------
/Samples/ShaderConductor/CMakeLists.txt:
--------------------------------------------------------------------------------
1 |
2 | set(SHADER_CONDUCTOR_INSTALL_DIR "SHADER_CONDUCTOR-NOTFOUND" CACHE PATH "Path to a prebuilt Shader Conductor")
3 |
4 | # Create an import target for ShaderConductor so we can use the target name directly
5 | add_executable(ShaderConductor IMPORTED GLOBAL)
6 |
7 | if(CMAKE_HOST_WIN32)
8 |     set(HOST_EXECUTABLE_SUFFIX ".exe" CACHE INTERNAL "")
9 | elseif(CMAKE_HOST_APPLE)
10 |     set(HOST_EXECUTABLE_SUFFIX "" CACHE INTERNAL "")
11 | elseif(CMAKE_HOST_UNIX)
12 |     set(HOST_EXECUTABLE_SUFFIX "" CACHE INTERNAL "")
13 | endif()
14 |
15 | set(ShaderConductor_BINARY_LOCATION "${SHADER_CONDUCTOR_INSTALL_DIR}/Bin/ShaderConductorCmd${HOST_EXECUTABLE_SUFFIX}")
16 |
17 | set_target_properties(ShaderConductor PROPERTIES IMPORTED_LOCATION "${ShaderConductor_BINARY_LOCATION}")
--------------------------------------------------------------------------------
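The samples expect a prebuilt ShaderConductor: the import target above resolves Bin/ShaderConductorCmd inside SHADER_CONDUCTOR_INSTALL_DIR. As a rough sketch of how that cache variable is supplied (the install path shown here is hypothetical, and the Build directory matches the repository's .gitignore entry), configuring and building the samples from the repository root might look like:

    cmake -S Samples -B Build -DSHADER_CONDUCTOR_INSTALL_DIR="C:/Tools/ShaderConductor"
    cmake --build Build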