// The #version and the buffer member types are assumed: they are inferred
// from the extensions enabled below and from how each member is used in main().
#version 450

#extension GL_ARB_gpu_shader_int64: enable
#extension GL_AMD_gpu_shader_half_float: enable
#extension GL_AMD_gpu_shader_int16: enable
#extension GL_AMD_shader_ballot: enable

layout (local_size_x = 8, local_size_y = 8, local_size_z = 1) in;

layout(binding = 0) buffer Buffers
{
    int         i;
    uvec2       uv;
    vec3        fv;
    dvec4       dv;
    int64_t     i64;
    u64vec2     u64v;
    f16vec3     f16v;
    i16vec4     i16v;
    uint16_t    u16;
};

void main()
{
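    // minInvocationsAMD / maxInvocationsAMD / addInvocationsAMD perform
    // subgroup-wide reductions: every invocation receives the min / max / sum
    // of the value across the participating invocations.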
    i    = minInvocationsAMD(i);
    uv   = minInvocationsAMD(uv);
    fv   = minInvocationsAMD(fv);
    dv   = minInvocationsAMD(dv);
    i64  = minInvocationsAMD(i64);
    u64v = minInvocationsAMD(u64v);
    f16v = minInvocationsAMD(f16v);
    i16v = minInvocationsAMD(i16v);
    u16  = minInvocationsAMD(u16);

    i    = maxInvocationsAMD(i);
    uv   = maxInvocationsAMD(uv);
    fv   = maxInvocationsAMD(fv);
    dv   = maxInvocationsAMD(dv);
    i64  = maxInvocationsAMD(i64);
    u64v = maxInvocationsAMD(u64v);
    f16v = maxInvocationsAMD(f16v);
    i16v = maxInvocationsAMD(i16v);
    u16  = maxInvocationsAMD(u16);

    i    = addInvocationsAMD(i);
    uv   = addInvocationsAMD(uv);
    fv   = addInvocationsAMD(fv);
    dv   = addInvocationsAMD(dv);
    i64  = addInvocationsAMD(i64);
    u64v = addInvocationsAMD(u64v);
    f16v = addInvocationsAMD(f16v);
    i16v = addInvocationsAMD(i16v);
    u16  = addInvocationsAMD(u16);
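
    // The *NonUniformAMD variants are meant for non-uniform (divergent)
    // control flow: only the invocations that actually execute the call
    // contribute to the result.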
    i    = minInvocationsNonUniformAMD(i);
    uv   = minInvocationsNonUniformAMD(uv);
    fv   = minInvocationsNonUniformAMD(fv);
    dv   = minInvocationsNonUniformAMD(dv);
    i64  = minInvocationsNonUniformAMD(i64);
    u64v = minInvocationsNonUniformAMD(u64v);
    f16v = minInvocationsNonUniformAMD(f16v);
    i16v = minInvocationsNonUniformAMD(i16v);
    u16  = minInvocationsNonUniformAMD(u16);

    i    = maxInvocationsNonUniformAMD(i);
    uv   = maxInvocationsNonUniformAMD(uv);
    fv   = maxInvocationsNonUniformAMD(fv);
    dv   = maxInvocationsNonUniformAMD(dv);
    i64  = maxInvocationsNonUniformAMD(i64);
    u64v = maxInvocationsNonUniformAMD(u64v);
    f16v = maxInvocationsNonUniformAMD(f16v);
    i16v = maxInvocationsNonUniformAMD(i16v);
    u16  = maxInvocationsNonUniformAMD(u16);

    i    = addInvocationsNonUniformAMD(i);
    uv   = addInvocationsNonUniformAMD(uv);
    fv   = addInvocationsNonUniformAMD(fv);
    dv   = addInvocationsNonUniformAMD(dv);
    i64  = addInvocationsNonUniformAMD(i64);
    u64v = addInvocationsNonUniformAMD(u64v);
    f16v = addInvocationsNonUniformAMD(f16v);
    i16v = addInvocationsNonUniformAMD(i16v);
    u16  = addInvocationsNonUniformAMD(u16);
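
    // The *InclusiveScanAMD variants are inclusive prefix scans: invocation n
    // receives the min / max / sum over invocations 0..n.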
    i    = minInvocationsInclusiveScanAMD(i);
    uv   = minInvocationsInclusiveScanAMD(uv);
    fv   = minInvocationsInclusiveScanAMD(fv);
    dv   = minInvocationsInclusiveScanAMD(dv);
    i64  = minInvocationsInclusiveScanAMD(i64);
    u64v = minInvocationsInclusiveScanAMD(u64v);
    f16v = minInvocationsInclusiveScanAMD(f16v);
    i16v = minInvocationsInclusiveScanAMD(i16v);
    u16  = minInvocationsInclusiveScanAMD(u16);

    i    = maxInvocationsInclusiveScanAMD(i);
    uv   = maxInvocationsInclusiveScanAMD(uv);
    fv   = maxInvocationsInclusiveScanAMD(fv);
    dv   = maxInvocationsInclusiveScanAMD(dv);
    i64  = maxInvocationsInclusiveScanAMD(i64);
    u64v = maxInvocationsInclusiveScanAMD(u64v);
    f16v = maxInvocationsInclusiveScanAMD(f16v);
    i16v = maxInvocationsInclusiveScanAMD(i16v);
    u16  = maxInvocationsInclusiveScanAMD(u16);

    i    = addInvocationsInclusiveScanAMD(i);
    uv   = addInvocationsInclusiveScanAMD(uv);
    fv   = addInvocationsInclusiveScanAMD(fv);
    dv   = addInvocationsInclusiveScanAMD(dv);
    i64  = addInvocationsInclusiveScanAMD(i64);
    u64v = addInvocationsInclusiveScanAMD(u64v);
    f16v = addInvocationsInclusiveScanAMD(f16v);
    i16v = addInvocationsInclusiveScanAMD(i16v);
    u16  = addInvocationsInclusiveScanAMD(u16);
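
    // The *ExclusiveScanAMD variants are exclusive prefix scans: invocation n
    // receives the result over invocations 0..n-1 (invocation 0 receives the
    // operation's identity value).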
    i    = minInvocationsExclusiveScanAMD(i);
    uv   = minInvocationsExclusiveScanAMD(uv);
    fv   = minInvocationsExclusiveScanAMD(fv);
    dv   = minInvocationsExclusiveScanAMD(dv);
    i64  = minInvocationsExclusiveScanAMD(i64);
    u64v = minInvocationsExclusiveScanAMD(u64v);
    f16v = minInvocationsExclusiveScanAMD(f16v);
    i16v = minInvocationsExclusiveScanAMD(i16v);
    u16  = minInvocationsExclusiveScanAMD(u16);

    i    = maxInvocationsExclusiveScanAMD(i);
    uv   = maxInvocationsExclusiveScanAMD(uv);
    fv   = maxInvocationsExclusiveScanAMD(fv);
    dv   = maxInvocationsExclusiveScanAMD(dv);
    i64  = maxInvocationsExclusiveScanAMD(i64);
    u64v = maxInvocationsExclusiveScanAMD(u64v);
    f16v = maxInvocationsExclusiveScanAMD(f16v);
    i16v = maxInvocationsExclusiveScanAMD(i16v);
    u16  = maxInvocationsExclusiveScanAMD(u16);

    i    = addInvocationsExclusiveScanAMD(i);
    uv   = addInvocationsExclusiveScanAMD(uv);
    fv   = addInvocationsExclusiveScanAMD(fv);
    dv   = addInvocationsExclusiveScanAMD(dv);
    i64  = addInvocationsExclusiveScanAMD(i64);
    u64v = addInvocationsExclusiveScanAMD(u64v);
    f16v = addInvocationsExclusiveScanAMD(f16v);
    i16v = addInvocationsExclusiveScanAMD(i16v);
    u16  = addInvocationsExclusiveScanAMD(u16);
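
    // Inclusive scans restricted to the invocations that execute the call.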
    i    = minInvocationsInclusiveScanNonUniformAMD(i);
    uv   = minInvocationsInclusiveScanNonUniformAMD(uv);
    fv   = minInvocationsInclusiveScanNonUniformAMD(fv);
    dv   = minInvocationsInclusiveScanNonUniformAMD(dv);
    i64  = minInvocationsInclusiveScanNonUniformAMD(i64);
    u64v = minInvocationsInclusiveScanNonUniformAMD(u64v);
    f16v = minInvocationsInclusiveScanNonUniformAMD(f16v);
    i16v = minInvocationsInclusiveScanNonUniformAMD(i16v);
    u16  = minInvocationsInclusiveScanNonUniformAMD(u16);

    i    = maxInvocationsInclusiveScanNonUniformAMD(i);
    uv   = maxInvocationsInclusiveScanNonUniformAMD(uv);
    fv   = maxInvocationsInclusiveScanNonUniformAMD(fv);
    dv   = maxInvocationsInclusiveScanNonUniformAMD(dv);
    i64  = maxInvocationsInclusiveScanNonUniformAMD(i64);
    u64v = maxInvocationsInclusiveScanNonUniformAMD(u64v);
    f16v = maxInvocationsInclusiveScanNonUniformAMD(f16v);
    i16v = maxInvocationsInclusiveScanNonUniformAMD(i16v);
    u16  = maxInvocationsInclusiveScanNonUniformAMD(u16);

    i    = addInvocationsInclusiveScanNonUniformAMD(i);
    uv   = addInvocationsInclusiveScanNonUniformAMD(uv);
    fv   = addInvocationsInclusiveScanNonUniformAMD(fv);
    dv   = addInvocationsInclusiveScanNonUniformAMD(dv);
    i64  = addInvocationsInclusiveScanNonUniformAMD(i64);
    u64v = addInvocationsInclusiveScanNonUniformAMD(u64v);
    f16v = addInvocationsInclusiveScanNonUniformAMD(f16v);
    i16v = addInvocationsInclusiveScanNonUniformAMD(i16v);
    u16  = addInvocationsInclusiveScanNonUniformAMD(u16);
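
    // Exclusive scans restricted to the invocations that execute the call.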
    i    = minInvocationsExclusiveScanNonUniformAMD(i);
    uv   = minInvocationsExclusiveScanNonUniformAMD(uv);
    fv   = minInvocationsExclusiveScanNonUniformAMD(fv);
    dv   = minInvocationsExclusiveScanNonUniformAMD(dv);
    i64  = minInvocationsExclusiveScanNonUniformAMD(i64);
    u64v = minInvocationsExclusiveScanNonUniformAMD(u64v);
    f16v = minInvocationsExclusiveScanNonUniformAMD(f16v);
    i16v = minInvocationsExclusiveScanNonUniformAMD(i16v);
    u16  = minInvocationsExclusiveScanNonUniformAMD(u16);

    i    = maxInvocationsExclusiveScanNonUniformAMD(i);
    uv   = maxInvocationsExclusiveScanNonUniformAMD(uv);
    fv   = maxInvocationsExclusiveScanNonUniformAMD(fv);
    dv   = maxInvocationsExclusiveScanNonUniformAMD(dv);
    i64  = maxInvocationsExclusiveScanNonUniformAMD(i64);
    u64v = maxInvocationsExclusiveScanNonUniformAMD(u64v);
    f16v = maxInvocationsExclusiveScanNonUniformAMD(f16v);
    i16v = maxInvocationsExclusiveScanNonUniformAMD(i16v);
    u16  = maxInvocationsExclusiveScanNonUniformAMD(u16);

    i    = addInvocationsExclusiveScanNonUniformAMD(i);
    uv   = addInvocationsExclusiveScanNonUniformAMD(uv);
    fv   = addInvocationsExclusiveScanNonUniformAMD(fv);
    dv   = addInvocationsExclusiveScanNonUniformAMD(dv);
    i64  = addInvocationsExclusiveScanNonUniformAMD(i64);
    u64v = addInvocationsExclusiveScanNonUniformAMD(u64v);
    f16v = addInvocationsExclusiveScanNonUniformAMD(f16v);
    i16v = addInvocationsExclusiveScanNonUniformAMD(i16v);
    u16  = addInvocationsExclusiveScanNonUniformAMD(u16);
}