Some documentation fixes.
[BearSSL] src/symcipher/aes_pwr8_cbcdec.c
/*
 * Copyright (c) 2017 Thomas Pornin <pornin@bolet.org>
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#define BR_POWER_ASM_MACROS   1
#include "inner.h"

#if BR_POWER8

/* see bearssl_block.h */
void
br_aes_pwr8_cbcdec_init(br_aes_pwr8_cbcdec_keys *ctx,
	const void *key, size_t len)
{
	ctx->vtable = &br_aes_pwr8_cbcdec_vtable;
	ctx->num_rounds = br_aes_pwr8_keysched(ctx->skey.skni, key, len);
}

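/*
 * Decrypt blocks in CBC mode with a 128-bit key (10 rounds). The data
 * is processed in place; num_blocks is the number of 16-byte blocks
 * and must be a positive multiple of 4, since the loop below handles
 * four blocks per iteration. The IV is read from iv[] but not written
 * back: IV chaining across calls is handled by the caller.
 */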
static void
cbcdec_128(const unsigned char *sk,
	const unsigned char *iv, unsigned char *buf, size_t num_blocks)
{
	long cc0, cc1, cc2, cc3;

#if BR_POWER8_LE
	static const uint32_t idx2be[] = {
		0x03020100, 0x07060504, 0x0B0A0908, 0x0F0E0D0C
	};
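	/*
	 * (When used with vperm, this pattern reverses the byte order
	 * within each 32-bit word; it is needed only on little-endian
	 * builds.)
	 */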
#endif

	cc0 = 0;
	cc1 = 16;
	cc2 = 32;
	cc3 = 48;
	asm volatile (

		/*
		 * Load subkeys into v0..v10
		 */
		lxvw4x(32, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(33, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(34, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(35, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(36, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(37, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(38, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(39, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(40, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(41, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(42, %[cc0], %[sk])
		li(%[cc0], 0)

#if BR_POWER8_LE
		/*
		 * v15 = constant for byteswapping words
		 */
		lxvw4x(47, 0, %[idx2be])
#endif
		/*
		 * Load IV into v24.
		 */
		lxvw4x(56, 0, %[iv])
#if BR_POWER8_LE
		vperm(24, 24, 24, 15)
#endif

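		/*
		 * The counter (CTR) is loaded with the number of 4-block
		 * chunks: the [num_blocks] operand is bound to
		 * num_blocks >> 2 in the input constraints below.
		 */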
		mtctr(%[num_blocks])
	label(loop)
		/*
		 * Load next ciphertext words in v16..v19. Also save them
		 * in v20..v23.
		 */
		lxvw4x(48, %[cc0], %[buf])
		lxvw4x(49, %[cc1], %[buf])
		lxvw4x(50, %[cc2], %[buf])
		lxvw4x(51, %[cc3], %[buf])
#if BR_POWER8_LE
		vperm(16, 16, 16, 15)
		vperm(17, 17, 17, 15)
		vperm(18, 18, 18, 15)
		vperm(19, 19, 19, 15)
#endif
		vand(20, 16, 16)
		vand(21, 17, 17)
		vand(22, 18, 18)
		vand(23, 19, 19)
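		/*
		 * (vand with two identical source registers is simply a
		 * vector register copy.)
		 */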

		/*
		 * Decrypt the blocks.
		 */
		vxor(16, 16, 10)
		vxor(17, 17, 10)
		vxor(18, 18, 10)
		vxor(19, 19, 10)
		vncipher(16, 16, 9)
		vncipher(17, 17, 9)
		vncipher(18, 18, 9)
		vncipher(19, 19, 9)
		vncipher(16, 16, 8)
		vncipher(17, 17, 8)
		vncipher(18, 18, 8)
		vncipher(19, 19, 8)
		vncipher(16, 16, 7)
		vncipher(17, 17, 7)
		vncipher(18, 18, 7)
		vncipher(19, 19, 7)
		vncipher(16, 16, 6)
		vncipher(17, 17, 6)
		vncipher(18, 18, 6)
		vncipher(19, 19, 6)
		vncipher(16, 16, 5)
		vncipher(17, 17, 5)
		vncipher(18, 18, 5)
		vncipher(19, 19, 5)
		vncipher(16, 16, 4)
		vncipher(17, 17, 4)
		vncipher(18, 18, 4)
		vncipher(19, 19, 4)
		vncipher(16, 16, 3)
		vncipher(17, 17, 3)
		vncipher(18, 18, 3)
		vncipher(19, 19, 3)
		vncipher(16, 16, 2)
		vncipher(17, 17, 2)
		vncipher(18, 18, 2)
		vncipher(19, 19, 2)
		vncipher(16, 16, 1)
		vncipher(17, 17, 1)
		vncipher(18, 18, 1)
		vncipher(19, 19, 1)
		vncipherlast(16, 16, 0)
		vncipherlast(17, 17, 0)
		vncipherlast(18, 18, 0)
		vncipherlast(19, 19, 0)

		/*
		 * XOR decrypted blocks with IV / previous block.
		 */
		vxor(16, 16, 24)
		vxor(17, 17, 20)
		vxor(18, 18, 21)
		vxor(19, 19, 22)

		/*
		 * Store back result (with byteswap)
		 */
#if BR_POWER8_LE
		vperm(16, 16, 16, 15)
		vperm(17, 17, 17, 15)
		vperm(18, 18, 18, 15)
		vperm(19, 19, 19, 15)
#endif
		stxvw4x(48, %[cc0], %[buf])
		stxvw4x(49, %[cc1], %[buf])
		stxvw4x(50, %[cc2], %[buf])
		stxvw4x(51, %[cc3], %[buf])

		/*
		 * Fourth encrypted block is IV for next run.
		 */
		vand(24, 23, 23)

		addi(%[buf], %[buf], 64)

		bdnz(loop)

	: [cc0] "+b" (cc0), [cc1] "+b" (cc1), [cc2] "+b" (cc2), [cc3] "+b" (cc3),
	  [buf] "+b" (buf)
	: [sk] "b" (sk), [iv] "b" (iv), [num_blocks] "b" (num_blocks >> 2)
#if BR_POWER8_LE
	, [idx2be] "b" (idx2be)
#endif
	: "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",
	  "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19",
	  "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29",
	  "ctr", "memory"
	);
}

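/*
 * Same as cbcdec_128(), but for 192-bit keys (12 rounds): the subkeys
 * occupy v0..v12 and two more vncipher rounds are performed per block.
 */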
static void
cbcdec_192(const unsigned char *sk,
	const unsigned char *iv, unsigned char *buf, size_t num_blocks)
{
	long cc0, cc1, cc2, cc3;

#if BR_POWER8_LE
	static const uint32_t idx2be[] = {
		0x03020100, 0x07060504, 0x0B0A0908, 0x0F0E0D0C
	};
#endif

	cc0 = 0;
	cc1 = 16;
	cc2 = 32;
	cc3 = 48;
	asm volatile (

		/*
		 * Load subkeys into v0..v12
		 */
		lxvw4x(32, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(33, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(34, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(35, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(36, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(37, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(38, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(39, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(40, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(41, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(42, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(43, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(44, %[cc0], %[sk])
		li(%[cc0], 0)

#if BR_POWER8_LE
		/*
		 * v15 = constant for byteswapping words
		 */
		lxvw4x(47, 0, %[idx2be])
#endif
		/*
		 * Load IV into v24.
		 */
		lxvw4x(56, 0, %[iv])
#if BR_POWER8_LE
		vperm(24, 24, 24, 15)
#endif

		mtctr(%[num_blocks])
	label(loop)
		/*
		 * Load next ciphertext words in v16..v19. Also save them
		 * in v20..v23.
		 */
		lxvw4x(48, %[cc0], %[buf])
		lxvw4x(49, %[cc1], %[buf])
		lxvw4x(50, %[cc2], %[buf])
		lxvw4x(51, %[cc3], %[buf])
#if BR_POWER8_LE
		vperm(16, 16, 16, 15)
		vperm(17, 17, 17, 15)
		vperm(18, 18, 18, 15)
		vperm(19, 19, 19, 15)
#endif
		vand(20, 16, 16)
		vand(21, 17, 17)
		vand(22, 18, 18)
		vand(23, 19, 19)

		/*
		 * Decrypt the blocks.
		 */
		vxor(16, 16, 12)
		vxor(17, 17, 12)
		vxor(18, 18, 12)
		vxor(19, 19, 12)
		vncipher(16, 16, 11)
		vncipher(17, 17, 11)
		vncipher(18, 18, 11)
		vncipher(19, 19, 11)
		vncipher(16, 16, 10)
		vncipher(17, 17, 10)
		vncipher(18, 18, 10)
		vncipher(19, 19, 10)
		vncipher(16, 16, 9)
		vncipher(17, 17, 9)
		vncipher(18, 18, 9)
		vncipher(19, 19, 9)
		vncipher(16, 16, 8)
		vncipher(17, 17, 8)
		vncipher(18, 18, 8)
		vncipher(19, 19, 8)
		vncipher(16, 16, 7)
		vncipher(17, 17, 7)
		vncipher(18, 18, 7)
		vncipher(19, 19, 7)
		vncipher(16, 16, 6)
		vncipher(17, 17, 6)
		vncipher(18, 18, 6)
		vncipher(19, 19, 6)
		vncipher(16, 16, 5)
		vncipher(17, 17, 5)
		vncipher(18, 18, 5)
		vncipher(19, 19, 5)
		vncipher(16, 16, 4)
		vncipher(17, 17, 4)
		vncipher(18, 18, 4)
		vncipher(19, 19, 4)
		vncipher(16, 16, 3)
		vncipher(17, 17, 3)
		vncipher(18, 18, 3)
		vncipher(19, 19, 3)
		vncipher(16, 16, 2)
		vncipher(17, 17, 2)
		vncipher(18, 18, 2)
		vncipher(19, 19, 2)
		vncipher(16, 16, 1)
		vncipher(17, 17, 1)
		vncipher(18, 18, 1)
		vncipher(19, 19, 1)
		vncipherlast(16, 16, 0)
		vncipherlast(17, 17, 0)
		vncipherlast(18, 18, 0)
		vncipherlast(19, 19, 0)

		/*
		 * XOR decrypted blocks with IV / previous block.
		 */
		vxor(16, 16, 24)
		vxor(17, 17, 20)
		vxor(18, 18, 21)
		vxor(19, 19, 22)

		/*
		 * Store back result (with byteswap)
		 */
#if BR_POWER8_LE
		vperm(16, 16, 16, 15)
		vperm(17, 17, 17, 15)
		vperm(18, 18, 18, 15)
		vperm(19, 19, 19, 15)
#endif
		stxvw4x(48, %[cc0], %[buf])
		stxvw4x(49, %[cc1], %[buf])
		stxvw4x(50, %[cc2], %[buf])
		stxvw4x(51, %[cc3], %[buf])

		/*
		 * Fourth encrypted block is IV for next run.
		 */
		vand(24, 23, 23)

		addi(%[buf], %[buf], 64)

		bdnz(loop)

	: [cc0] "+b" (cc0), [cc1] "+b" (cc1), [cc2] "+b" (cc2), [cc3] "+b" (cc3),
	  [buf] "+b" (buf)
	: [sk] "b" (sk), [iv] "b" (iv), [num_blocks] "b" (num_blocks >> 2)
#if BR_POWER8_LE
	, [idx2be] "b" (idx2be)
#endif
	: "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",
	  "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19",
	  "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29",
	  "ctr", "memory"
	);
}

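/*
 * Same as cbcdec_128(), but for 256-bit keys (14 rounds): the subkeys
 * occupy v0..v14.
 */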
static void
cbcdec_256(const unsigned char *sk,
	const unsigned char *iv, unsigned char *buf, size_t num_blocks)
{
	long cc0, cc1, cc2, cc3;

#if BR_POWER8_LE
	static const uint32_t idx2be[] = {
		0x03020100, 0x07060504, 0x0B0A0908, 0x0F0E0D0C
	};
#endif

	cc0 = 0;
	cc1 = 16;
	cc2 = 32;
	cc3 = 48;
	asm volatile (

		/*
		 * Load subkeys into v0..v14
		 */
		lxvw4x(32, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(33, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(34, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(35, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(36, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(37, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(38, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(39, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(40, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(41, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(42, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(43, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(44, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(45, %[cc0], %[sk])
		addi(%[cc0], %[cc0], 16)
		lxvw4x(46, %[cc0], %[sk])
		li(%[cc0], 0)

#if BR_POWER8_LE
		/*
		 * v15 = constant for byteswapping words
		 */
		lxvw4x(47, 0, %[idx2be])
#endif
		/*
		 * Load IV into v24.
		 */
		lxvw4x(56, 0, %[iv])
#if BR_POWER8_LE
		vperm(24, 24, 24, 15)
#endif

		mtctr(%[num_blocks])
	label(loop)
		/*
		 * Load next ciphertext words in v16..v19. Also save them
		 * in v20..v23.
		 */
		lxvw4x(48, %[cc0], %[buf])
		lxvw4x(49, %[cc1], %[buf])
		lxvw4x(50, %[cc2], %[buf])
		lxvw4x(51, %[cc3], %[buf])
#if BR_POWER8_LE
		vperm(16, 16, 16, 15)
		vperm(17, 17, 17, 15)
		vperm(18, 18, 18, 15)
		vperm(19, 19, 19, 15)
#endif
		vand(20, 16, 16)
		vand(21, 17, 17)
		vand(22, 18, 18)
		vand(23, 19, 19)

		/*
		 * Decrypt the blocks.
		 */
		vxor(16, 16, 14)
		vxor(17, 17, 14)
		vxor(18, 18, 14)
		vxor(19, 19, 14)
		vncipher(16, 16, 13)
		vncipher(17, 17, 13)
		vncipher(18, 18, 13)
		vncipher(19, 19, 13)
		vncipher(16, 16, 12)
		vncipher(17, 17, 12)
		vncipher(18, 18, 12)
		vncipher(19, 19, 12)
		vncipher(16, 16, 11)
		vncipher(17, 17, 11)
		vncipher(18, 18, 11)
		vncipher(19, 19, 11)
		vncipher(16, 16, 10)
		vncipher(17, 17, 10)
		vncipher(18, 18, 10)
		vncipher(19, 19, 10)
		vncipher(16, 16, 9)
		vncipher(17, 17, 9)
		vncipher(18, 18, 9)
		vncipher(19, 19, 9)
		vncipher(16, 16, 8)
		vncipher(17, 17, 8)
		vncipher(18, 18, 8)
		vncipher(19, 19, 8)
		vncipher(16, 16, 7)
		vncipher(17, 17, 7)
		vncipher(18, 18, 7)
		vncipher(19, 19, 7)
		vncipher(16, 16, 6)
		vncipher(17, 17, 6)
		vncipher(18, 18, 6)
		vncipher(19, 19, 6)
		vncipher(16, 16, 5)
		vncipher(17, 17, 5)
		vncipher(18, 18, 5)
		vncipher(19, 19, 5)
		vncipher(16, 16, 4)
		vncipher(17, 17, 4)
		vncipher(18, 18, 4)
		vncipher(19, 19, 4)
		vncipher(16, 16, 3)
		vncipher(17, 17, 3)
		vncipher(18, 18, 3)
		vncipher(19, 19, 3)
		vncipher(16, 16, 2)
		vncipher(17, 17, 2)
		vncipher(18, 18, 2)
		vncipher(19, 19, 2)
		vncipher(16, 16, 1)
		vncipher(17, 17, 1)
		vncipher(18, 18, 1)
		vncipher(19, 19, 1)
		vncipherlast(16, 16, 0)
		vncipherlast(17, 17, 0)
		vncipherlast(18, 18, 0)
		vncipherlast(19, 19, 0)

		/*
		 * XOR decrypted blocks with IV / previous block.
		 */
		vxor(16, 16, 24)
		vxor(17, 17, 20)
		vxor(18, 18, 21)
		vxor(19, 19, 22)

		/*
		 * Store back result (with byteswap)
		 */
#if BR_POWER8_LE
		vperm(16, 16, 16, 15)
		vperm(17, 17, 17, 15)
		vperm(18, 18, 18, 15)
		vperm(19, 19, 19, 15)
#endif
		stxvw4x(48, %[cc0], %[buf])
		stxvw4x(49, %[cc1], %[buf])
		stxvw4x(50, %[cc2], %[buf])
		stxvw4x(51, %[cc3], %[buf])

		/*
		 * Fourth encrypted block is IV for next run.
		 */
		vand(24, 23, 23)

		addi(%[buf], %[buf], 64)

		bdnz(loop)

	: [cc0] "+b" (cc0), [cc1] "+b" (cc1), [cc2] "+b" (cc2), [cc3] "+b" (cc3),
	  [buf] "+b" (buf)
	: [sk] "b" (sk), [iv] "b" (iv), [num_blocks] "b" (num_blocks >> 2)
#if BR_POWER8_LE
	, [idx2be] "b" (idx2be)
#endif
	: "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",
	  "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19",
	  "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29",
	  "ctr", "memory"
	);
}

/* see bearssl_block.h */
void
br_aes_pwr8_cbcdec_run(const br_aes_pwr8_cbcdec_keys *ctx,
	void *iv, void *data, size_t len)
{
	unsigned char nextiv[16];
	unsigned char *buf;

	if (len == 0) {
		return;
	}
	buf = data;
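	/*
	 * The last ciphertext block becomes the IV to return to the
	 * caller; save it before the in-place decryption overwrites it.
	 */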
	memcpy(nextiv, buf + len - 16, 16);
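	/*
	 * Process as many complete 4-block chunks as possible with the
	 * vector code. The last ciphertext block of that bulk region is
	 * saved in tmp[] and becomes the IV for the remaining blocks.
	 */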
	if (len >= 64) {
		size_t num_blocks;
		unsigned char tmp[16];

		num_blocks = (len >> 4) & ~(size_t)3;
		memcpy(tmp, buf + (num_blocks << 4) - 16, 16);
		switch (ctx->num_rounds) {
		case 10:
			cbcdec_128(ctx->skey.skni, iv, buf, num_blocks);
			break;
		case 12:
			cbcdec_192(ctx->skey.skni, iv, buf, num_blocks);
			break;
		default:
			cbcdec_256(ctx->skey.skni, iv, buf, num_blocks);
			break;
		}
		buf += num_blocks << 4;
		len &= 63;
		memcpy(iv, tmp, 16);
	}
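	/*
	 * At most 3 blocks remain. Copy them into a zero-padded 64-byte
	 * buffer, decrypt that buffer as a single 4-block chunk, then
	 * copy back only the bytes that correspond to actual data.
	 */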
	if (len > 0) {
		unsigned char tmp[64];

		memcpy(tmp, buf, len);
		memset(tmp + len, 0, (sizeof tmp) - len);
		switch (ctx->num_rounds) {
		case 10:
			cbcdec_128(ctx->skey.skni, iv, tmp, 4);
			break;
		case 12:
			cbcdec_192(ctx->skey.skni, iv, tmp, 4);
			break;
		default:
			cbcdec_256(ctx->skey.skni, iv, tmp, 4);
			break;
		}
		memcpy(buf, tmp, len);
	}
	memcpy(iv, nextiv, 16);
}

/* see bearssl_block.h */
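/*
 * Fields: context size, block size in bytes, base-2 logarithm of the
 * block size, then the init and run methods.
 */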
const br_block_cbcdec_class br_aes_pwr8_cbcdec_vtable = {
	sizeof(br_aes_pwr8_cbcdec_keys),
	16,
	4,
	(void (*)(const br_block_cbcdec_class **, const void *, size_t))
		&br_aes_pwr8_cbcdec_init,
	(void (*)(const br_block_cbcdec_class *const *, void *, void *, size_t))
		&br_aes_pwr8_cbcdec_run
};

/* see bearssl_block.h */
const br_block_cbcdec_class *
br_aes_pwr8_cbcdec_get_vtable(void)
{
	return br_aes_pwr8_supported() ? &br_aes_pwr8_cbcdec_vtable : NULL;
}

#else

/* see bearssl_block.h */
const br_block_cbcdec_class *
br_aes_pwr8_cbcdec_get_vtable(void)
{
	return NULL;
}

#endif