1 #ifndef NPV_VK_API_USAGE_H
2 #define NPV_VK_API_USAGE_H
4 * this is public domain without any warranties of any kind
8 * this is the simplification and tailoring of the vk api for the specific
/* Logging shims: prefix every message with "vulkan:" before forwarding to
 * the project-wide FATAL/WARNING/POUT macros. `##__VA_ARGS__` (GNU
 * extension) drops the trailing comma when no variadic args are passed. */
11 #define FATALVK(fmt, ...) FATAL("vulkan:" fmt, ##__VA_ARGS__)
12 #define WARNINGVK(fmt, ...) WARNING("vulkan:" fmt, ##__VA_ARGS__)
13 #define POUTVK(fmt, ...) POUT("vulkan:" fmt, ##__VA_ARGS__)
/* NOTE(review): a continuation line of IF_FATALVK (original line 16,
 * presumably a conditional on the shared result variable `r`) is missing
 * from this chunk -- restore from the upstream file before building. */
15 #define IF_FATALVK(fmt, ...) \
17 FATALVK(fmt, ##__VA_ARGS__)
18 /*----------------------------------------------------------------------------*/
/* Fetch queue 0 of the selected queue family into npv_vk_surf_p.dev.q. */
19 #define vk_get_dev_q() \
20 npv_vk_surf_p.dev.dl_vk_get_dev_q(npv_vk_surf_p.dev.vk, npv_vk_surf_p.dev.phydev.q_fam, 0, &npv_vk_surf_p.dev.q)
/* Create the command pool into npv_vk_surf_p.dev.cp; third arg 0 = no
 * custom allocator. Stores the result in a variable `r` that must be in
 * scope at the expansion site (convention for all `r = ...` macros here). */
22 #define vk_create_cp(info) \
23 r = npv_vk_surf_p.dev.dl_vk_create_cp(npv_vk_surf_p.dev.vk, info, 0, &npv_vk_surf_p.dev.cp)
/* Create the swapchain into npv_vk_surf_p.dev.swpchn.vk (allocator = 0);
 * result lands in caller-scope `r`. */
25 #define vk_create_swpchn(info) \
26 r = npv_vk_surf_p.dev.dl_vk_create_swpchn(npv_vk_surf_p.dev.vk, info, 0, &npv_vk_surf_p.dev.swpchn.vk)
/* Destroy a swapchain handle; trailing 0 = no custom allocator. */
28 #define vk_destroy_swpchn(swpchn) \
29 npv_vk_surf_p.dev.dl_vk_destroy_swpchn(npv_vk_surf_p.dev.vk, swpchn, 0)
/* Fill swpchn.imgs_n / swpchn.imgs from the current swapchain. */
31 #define vk_get_swpchn_imgs() \
32 r = npv_vk_surf_p.dev.dl_vk_get_swpchn_imgs(npv_vk_surf_p.dev.vk, npv_vk_surf_p.dev.swpchn.vk, &npv_vk_surf_p.dev.swpchn.imgs_n, npv_vk_surf_p.dev.swpchn.imgs)
/* Image object lifecycle: create/destroy with allocator = 0. */
34 #define vk_create_img(info, img) \
35 r = npv_vk_surf_p.dev.dl_vk_create_img(npv_vk_surf_p.dev.vk, info, 0, img)
37 #define vk_destroy_img(img) \
38 npv_vk_surf_p.dev.dl_vk_destroy_img(npv_vk_surf_p.dev.vk, img, 0)
/* Query memory requirements for an image; forwards its variadic args
 * after the device handle. */
40 #define vk_get_img_mem_rqmts(...) \
41 r = npv_vk_surf_p.dev.dl_vk_get_img_mem_rqmts(npv_vk_surf_p.dev.vk, ##__VA_ARGS__)
/* Device-memory alloc/free (allocator = 0). */
43 #define vk_alloc_mem(info, dev_mem) \
44 r = npv_vk_surf_p.dev.dl_vk_alloc_mem(npv_vk_surf_p.dev.vk, info, 0, dev_mem)
46 #define vk_free_mem(dev_mem) \
47 npv_vk_surf_p.dev.dl_vk_free_mem(npv_vk_surf_p.dev.vk, dev_mem, 0)
/* Bind exactly one image/memory pair (hard-coded count of 1). */
49 #define vk_bind_img_mem(infos) \
50 r = npv_vk_surf_p.dev.dl_vk_bind_img_mem(npv_vk_surf_p.dev.vk, 1, infos)
/* Map a whole device-memory object (offset 0, size vk_whole_sz, flags 0)
 * into host address space; `data` receives the pointer. */
52 #define vk_map_mem(dev_mem, data) \
53 r = npv_vk_surf_p.dev.dl_vk_map_mem(npv_vk_surf_p.dev.vk, dev_mem, 0, vk_whole_sz, 0, data)
55 #define vk_unmap_mem(dev_mem) \
56 npv_vk_surf_p.dev.dl_vk_unmap_mem(npv_vk_surf_p.dev.vk, dev_mem)
/* Allocate command buffers into the fixed npv_vk_surf_p.dev.cbs array. */
58 #define vk_alloc_cbs(info) \
59 r = npv_vk_surf_p.dev.dl_vk_alloc_cbs(npv_vk_surf_p.dev.vk, info, npv_vk_surf_p.dev.cbs)
/* Thin pass-throughs: begin/end recording of a command buffer, result
 * stored in caller-scope `r`. */
61 #define vk_begin_cb(...) \
62 r = npv_vk_surf_p.dev.dl_vk_begin_cb(__VA_ARGS__)
64 #define vk_end_cb(...) \
65 r = npv_vk_surf_p.dev.dl_vk_end_cb(__VA_ARGS__)
/* Record a pipeline barrier carrying a single barrier `b`; both source
 * and destination stage masks are vk_pl_stage_top_of_pipe_bit, and all
 * other counts/arrays are zeroed (the trailing `1, b` is presumably the
 * image-memory-barrier count/array pair -- confirm against the loaded
 * vkCmdPipelineBarrier signature). */
67 #define vk_cmd_pl_barrier(cb, b) \
68 npv_vk_surf_p.dev.dl_vk_cmd_pl_barrier(cb, vk_pl_stage_top_of_pipe_bit, vk_pl_stage_top_of_pipe_bit, 0, 0, 0, 0, 0, 1, b)
/* Submit exactly one submit-info on the cached queue (fence = 0). */
70 #define vk_q_submit(info) \
71 r = npv_vk_surf_p.dev.dl_vk_q_submit(npv_vk_surf_p.dev.q, 1, info, 0)
/* Block until the cached queue is idle. */
73 #define vk_q_wait_idle() \
74 r = npv_vk_surf_p.dev.dl_vk_q_wait_idle(npv_vk_surf_p.dev.q)
/* Query the memory layout of an image subresource; variadic args follow
 * the device handle. */
76 #define vk_get_img_subrsrc_layout(...) \
77 npv_vk_surf_p.dev.dl_vk_get_img_subrsrc_layout(npv_vk_surf_p.dev.vk, ##__VA_ARGS__)
/* Acquire the next presentable swapchain image; result code lands in
 * caller-scope `r`. */
79 #define vk_acquire_next_img(...) \
80 r = npv_vk_surf_p.dev.dl_vk_acquire_next_img(npv_vk_surf_p.dev.vk, ##__VA_ARGS__)
/* Reset a command buffer with flags = 0. */
82 #define vk_reset_cb(cb) \
83 r = npv_vk_surf_p.dev.dl_vk_reset_cb(cb, 0)
85 /* linear filtering if scaling happens */
86 /* TODO: should be tested prior to usage: fmt feature of src img should have the vk_fmt_feature_sampled_img_filt_linear_bit */
/* Record a blit of one region, source in general layout to destination in
 * present layout; the trailing `1, region, 1` is presumably region count,
 * region array, then the linear filter (per the note above) -- confirm
 * against the loaded vkCmdBlitImage signature. */
87 #define vk_cmd_blit_img(cb, src_img, dst_img, region) \
88 npv_vk_surf_p.dev.dl_vk_cmd_blit_img(cb, src_img, vk_img_layout_general, dst_img, vk_img_layout_present, 1, region, 1)
/* Present on the cached queue; result in caller-scope `r`. */
90 #define vk_q_present(info) \
91 r = npv_vk_surf_p.dev.dl_vk_q_present(npv_vk_surf_p.dev.q, info)
/* Create a semaphore (allocator = 0). */
93 #define vk_create_sem(info, sem) \
94 r = npv_vk_surf_p.dev.dl_vk_create_sem(npv_vk_surf_p.dev.vk, info, 0, sem)
/* Direct alias: caller supplies every argument. */
96 #define vk_cmd_clr_color_img npv_vk_surf_p.dev.dl_vk_cmd_clr_color_img
97 /******************************************************************************/
98 /* cherry picked from nyanvk/syms_global.h */
/* Expands to the file-scope (static) function-pointer declarations for
 * the loader/instance-level Vulkan entry points that are resolved at
 * runtime; each translation unit that pastes this macro gets its own set
 * of `dl_vk_*` pointers.
 * NOTE(review): several continuation lines of this macro (e.g. original
 * lines 104, 107-108, 112, 116, 120-121, 129, 138, 143, 146, 148, 152,
 * 164-165) are missing from this chunk, so some prototypes below are
 * visibly truncated -- restore from the upstream file before building. */
99 #define VK_GLOBAL_SYMS \
100 static void *(*dl_vk_get_instance_proc_addr)(struct vk_instance_t *instance, u8 *name); \
101 static void *(*dl_vk_get_dev_proc_addr)(struct vk_dev_t *dev, u8 *name); \
102 static s32 (*dl_vk_enumerate_instance_version)(u32 *version); \
103 static s32 (*dl_vk_enumerate_instance_layer_props)( \
105 struct vk_layer_props_t *props); \
106 static s32 (*dl_vk_enumerate_instance_ext_props)( \
109 struct vk_ext_props_t *props); \
110 static s32 (*dl_vk_create_instance)( \
111 struct vk_instance_create_info_t *info, \
113 struct vk_instance_t **instance); \
114 static s32 (*dl_vk_enumerate_phydevs)( \
115 struct vk_instance_t *instance, \
117 struct vk_phydev_t **phydevs); \
118 static s32 (*dl_vk_enumerate_dev_ext_props)( \
119 struct vk_phydev_t *phydev, \
122 struct vk_ext_props_t *props); \
123 static void (*dl_vk_get_phydev_props)( \
124 struct vk_phydev_t *phydev, \
125 struct vk_phydev_props_t *props); \
126 static s32 (*dl_vk_create_dev)( \
127 struct vk_phydev_t *phydev, \
128 struct vk_dev_create_info_t *create_info, \
130 struct vk_dev_t **dev); \
131 static void (*dl_vk_get_phydev_q_fam_props)( \
132 struct vk_phydev_t *phydev, \
133 u32 *q_fam_props_n, \
134 struct vk_q_fam_props_t *props); \
135 static s32 (*dl_vk_create_xcb_surf)( \
136 struct vk_instance_t *instance, \
137 struct vk_xcb_surf_create_info_t *info, \
139 struct vk_surf_t **surf); \
140 static void (*dl_vk_destroy_surf)(\
141 struct vk_instance_t *instance,\
142 struct vk_surf_t *surf,\
144 static s32 (*dl_vk_get_phydev_surf_support)( \
145 struct vk_phydev_t *phydev, \
147 struct vk_surf_t *surf, \
149 static s32 (*dl_vk_get_phydev_surf_texel_mem_blk_confs)( \
150 struct vk_phydev_t *phydev, \
151 struct vk_phydev_surf_info_t *info, \
153 struct vk_surf_texel_mem_blk_conf_t *confs); \
154 static void (*dl_vk_get_phydev_mem_props)( \
155 struct vk_phydev_t *phydev, \
156 struct vk_phydev_mem_props_t *props); \
157 static s32 (*dl_vk_get_phydev_surf_caps)( \
158 struct vk_phydev_t *phydev, \
159 struct vk_phydev_surf_info_t *info, \
160 struct vk_surf_caps_t *caps); \
161 static s32 (*dl_vk_get_phydev_surf_present_modes)( \
162 struct vk_phydev_t *phydev, \
163 struct vk_surf_t *surf, \
166 /******************************************************************************/
/* Direct aliases onto the runtime-resolved loader entry points. */
167 #define vk_get_instance_proc_addr dl_vk_get_instance_proc_addr
169 #define vk_get_dev_proc_addr dl_vk_get_dev_proc_addr
/* These two expand WITHOUT parentheses: the call's argument list is
 * supplied at the expansion site, e.g. vk_enumerate_instance_version(&v);
 * the result is assigned to a caller-scope `r`. */
171 #define vk_enumerate_instance_version \
172 r = dl_vk_enumerate_instance_version
174 #define vk_enumerate_instance_layer_props \
175 r = dl_vk_enumerate_instance_layer_props
/* Leading 0 = no layer-name filter. */
177 #define vk_enumerate_instance_ext_props(...) \
178 r = dl_vk_enumerate_instance_ext_props(0, ##__VA_ARGS__)
/* Create the single instance into the file-local npv_vk_instance_l
 * (allocator = 0). */
180 #define vk_create_instance(info) \
181 r = dl_vk_create_instance(info, 0, &npv_vk_instance_l)
183 #define vk_enumerate_phydevs(...) \
184 r = dl_vk_enumerate_phydevs(npv_vk_instance_l, ##__VA_ARGS__)
/* Second arg 0 = no layer-name filter. */
186 #define vk_enumerate_dev_ext_props(phydev, props_n, props) \
187 r = dl_vk_enumerate_dev_ext_props(phydev, 0, props_n, props)
189 #define vk_get_phydev_props dl_vk_get_phydev_props
/* Create the logical device from the selected physical device into
 * npv_vk_surf_p.dev.vk (allocator = 0). */
191 #define vk_create_dev(info) \
192 r = dl_vk_create_dev(npv_vk_surf_p.dev.phydev.vk, info, 0, &npv_vk_surf_p.dev.vk)
194 #define vk_get_phydev_q_fam_props dl_vk_get_phydev_q_fam_props
/* Create the xcb presentation surface into npv_vk_surf_p.vk. */
196 #define vk_create_xcb_surf(info) \
197 r = dl_vk_create_xcb_surf(npv_vk_instance_l, info, 0, &npv_vk_surf_p.vk)
/* Can (phydev, q_fam) present to the cached surface? */
199 #define vk_get_phydev_surf_support(phydev, q_fam, supported) \
200 r = dl_vk_get_phydev_surf_support(phydev, q_fam, npv_vk_surf_p.vk, supported)
202 #define vk_get_phydev_surf_texel_mem_blk_confs(info, ...) \
203 r = dl_vk_get_phydev_surf_texel_mem_blk_confs(npv_vk_surf_p.dev.phydev.vk, info, ##__VA_ARGS__)
205 #define vk_get_phydev_mem_props dl_vk_get_phydev_mem_props
207 #define vk_get_phydev_surf_caps(info, caps) \
208 r = dl_vk_get_phydev_surf_caps(npv_vk_surf_p.dev.phydev.vk, info, caps)
/* Fill the file-local scratch list of present modes for the cached
 * surface. */
210 #define vk_get_phydev_surf_present_modes() \
211 r = dl_vk_get_phydev_surf_present_modes(npv_vk_surf_p.dev.phydev.vk, npv_vk_surf_p.vk, &npv_vk_tmp_present_modes_n_l, npv_vk_tmp_present_modes_l)