# AArch32 Neon data-processing instruction descriptions
#
# Copyright (c) 2020 Linaro, Ltd
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, see <http://www.gnu.org/licenses/>.
#
# This file is processed by scripts/decodetree.py
#
# VFP/Neon register fields; same as vfp.decode
%vm_dp 5:1 0:4
%vn_dp 7:1 16:4
%vd_dp 22:1 12:4
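# As an illustration (not decodetree syntax, and not QEMU code): the
# three fields above put the 1-bit M, N and D bits on top of a 4-bit
# register field. In standalone C on a raw A32 insn word:
#     #include <stdint.h>
#     static int vm_dp(uint32_t insn) { return (((insn >> 5) & 1) << 4) | (insn & 0xf); }
#     static int vn_dp(uint32_t insn) { return (((insn >> 7) & 1) << 4) | ((insn >> 16) & 0xf); }
#     static int vd_dp(uint32_t insn) { return (((insn >> 22) & 1) << 4) | ((insn >> 12) & 0xf); }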
# Encodings for Neon data processing instructions where the T32 encoding
# is a simple transformation of the A32 encoding.
# More specifically, this file covers instructions where the A32 encoding is
# 0b1111_001p_qqqq_qqqq_qqqq_qqqq_qqqq_qqqq
# and the T32 encoding is
# 0b111p_1111_qqqq_qqqq_qqqq_qqqq_qqqq_qqqq
# This file works on the A32 encoding only; calling code for T32 has to
# transform the insn into the A32 version first.
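# A hedged sketch of that rewrite (the real conversion lives in the T32
# decode path, not in this file; the function name is illustrative):
# move the 'p' bit from bit 28 of the T32 word down to bit 24 and
# supply the fixed A32 pattern 1111 001 in bits [31:25], keeping the
# low 24 'q' bits unchanged.
#     #include <stdint.h>
#     static uint32_t t32_to_a32_neon_dp(uint32_t t32_insn)
#     {
#         uint32_t p = (t32_insn >> 28) & 1;
#         return 0xf2000000 | (p << 24) | (t32_insn & 0x00ffffff);
#     }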

######################################################################
# 3-reg-same grouping:
# 1111 001 U 0 D sz:2 Vn:4 Vd:4 opc:4 N Q M op Vm:4
######################################################################
&3same vm vn vd q size
@3same .... ... . . . size:2 .... .... .... . q:1 . . .... \
&3same vm=%vm_dp vn=%vn_dp vd=%vd_dp
@3same_q0 .... ... . . . size:2 .... .... .... . 0 . . .... \
&3same vm=%vm_dp vn=%vn_dp vd=%vd_dp q=0
# For FP insns the high bit of 'size' is used as part of opcode decode,
# and the 'size' bit is 0 for 32-bit float and 1 for 16-bit float.
# The %3same_fp_size field below converts this encoding to the same
# MO_8/16/32/64 values that the integer Neon insns use.
%3same_fp_size 20:1 !function=neon_3same_fp_size
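# For illustration, one plausible shape for the extract function named
# above (the signature is assumed to follow the usual decodetree
# '!function' convention of (DisasContext *, raw field value)):
#     static int neon_3same_fp_size(DisasContext *s, int x)
#     {
#         /* encoded bit: 0 means 32-bit float, 1 means 16-bit float */
#         return x ? MO_16 : MO_32;
#     }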
@3same_fp .... ... . . . . . .... .... .... . q:1 . . .... \
&3same vm=%vm_dp vn=%vn_dp vd=%vd_dp size=%3same_fp_size
@3same_fp_q0 .... ... . . . . . .... .... .... . 0 . . .... \
&3same vm=%vm_dp vn=%vn_dp vd=%vd_dp q=0 size=%3same_fp_size
VHADD_S_3s 1111 001 0 0 . .. .... .... 0000 . . . 0 .... @3same
VHADD_U_3s 1111 001 1 0 . .. .... .... 0000 . . . 0 .... @3same
VQADD_S_3s 1111 001 0 0 . .. .... .... 0000 . . . 1 .... @3same
VQADD_U_3s 1111 001 1 0 . .. .... .... 0000 . . . 1 .... @3same
VRHADD_S_3s 1111 001 0 0 . .. .... .... 0001 . . . 0 .... @3same
VRHADD_U_3s 1111 001 1 0 . .. .... .... 0001 . . . 0 .... @3same
@3same_logic .... ... . . . .. .... .... .... . q:1 .. .... \
&3same vm=%vm_dp vn=%vn_dp vd=%vd_dp size=0
VAND_3s 1111 001 0 0 . 00 .... .... 0001 ... 1 .... @3same_logic
VBIC_3s 1111 001 0 0 . 01 .... .... 0001 ... 1 .... @3same_logic
VORR_3s 1111 001 0 0 . 10 .... .... 0001 ... 1 .... @3same_logic
VORN_3s 1111 001 0 0 . 11 .... .... 0001 ... 1 .... @3same_logic
VEOR_3s 1111 001 1 0 . 00 .... .... 0001 ... 1 .... @3same_logic
VBSL_3s 1111 001 1 0 . 01 .... .... 0001 ... 1 .... @3same_logic
VBIT_3s 1111 001 1 0 . 10 .... .... 0001 ... 1 .... @3same_logic
VBIF_3s 1111 001 1 0 . 11 .... .... 0001 ... 1 .... @3same_logic
VHSUB_S_3s 1111 001 0 0 . .. .... .... 0010 . . . 0 .... @3same
VHSUB_U_3s 1111 001 1 0 . .. .... .... 0010 . . . 0 .... @3same
VQSUB_S_3s 1111 001 0 0 . .. .... .... 0010 . . . 1 .... @3same
VQSUB_U_3s 1111 001 1 0 . .. .... .... 0010 . . . 1 .... @3same
VCGT_S_3s 1111 001 0 0 . .. .... .... 0011 . . . 0 .... @3same
VCGT_U_3s 1111 001 1 0 . .. .... .... 0011 . . . 0 .... @3same
VCGE_S_3s 1111 001 0 0 . .. .... .... 0011 . . . 1 .... @3same
VCGE_U_3s 1111 001 1 0 . .. .... .... 0011 . . . 1 .... @3same
# The _rev suffix indicates that Vn and Vm are reversed. This is
# the case for shifts. In the Arm ARM these insns are documented
# with the Vm and Vn fields in their usual places, but in the
# assembly the operands are listed "backwards", ie in the order
# Dd, Dm, Dn where other insns use Dd, Dn, Dm. For QEMU we choose
# to consider Vm and Vn as being in different fields in the insn,
# which allows us to avoid special-casing shifts in the trans_
# function code. We would otherwise need to manually swap the operands
# over to call Neon helper functions that are shared with AArch64,
# which does not have this odd reversed-operand situation.
@3same_rev .... ... . . . size:2 .... .... .... . q:1 . . .... \
&3same vn=%vm_dp vm=%vn_dp vd=%vd_dp
VSHL_S_3s 1111 001 0 0 . .. .... .... 0100 . . . 0 .... @3same_rev
VSHL_U_3s 1111 001 1 0 . .. .... .... 0100 . . . 0 .... @3same_rev
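# Illustrative only: because @3same_rev has already swapped the fields,
# a trans function for these shifts can hand its arguments straight to
# a helper expecting the AArch64 operand order (data in 'n', per-element
# shift counts in 'm'). The helper name below is a placeholder:
#     static bool trans_VSHL_S_3s(DisasContext *s, arg_3same *a)
#     {
#         /* a->vn is the insn's Vm field (data); a->vm is Vn (shifts) */
#         return do_signed_vshl_3same(s, a);
#     }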
# Insns operating on 64-bit elements (size!=0b11 handled elsewhere)
# The _rev suffix indicates that Vn and Vm are reversed (as explained
# by the comment for the @3same_rev format).
@3same_64_rev .... ... . . . 11 .... .... .... . q:1 . . .... \
&3same vm=%vn_dp vn=%vm_dp vd=%vd_dp size=3
{
VQSHL_S64_3s 1111 001 0 0 . .. .... .... 0100 . . . 1 .... @3same_64_rev
VQSHL_S_3s 1111 001 0 0 . .. .... .... 0100 . . . 1 .... @3same_rev
}
{
VQSHL_U64_3s 1111 001 1 0 . .. .... .... 0100 . . . 1 .... @3same_64_rev
VQSHL_U_3s 1111 001 1 0 . .. .... .... 0100 . . . 1 .... @3same_rev
}
{
VRSHL_S64_3s 1111 001 0 0 . .. .... .... 0101 . . . 0 .... @3same_64_rev
VRSHL_S_3s 1111 001 0 0 . .. .... .... 0101 . . . 0 .... @3same_rev
}
{
VRSHL_U64_3s 1111 001 1 0 . .. .... .... 0101 . . . 0 .... @3same_64_rev
VRSHL_U_3s 1111 001 1 0 . .. .... .... 0101 . . . 0 .... @3same_rev
}
{
VQRSHL_S64_3s 1111 001 0 0 . .. .... .... 0101 . . . 1 .... @3same_64_rev
VQRSHL_S_3s 1111 001 0 0 . .. .... .... 0101 . . . 1 .... @3same_rev
}
{
VQRSHL_U64_3s 1111 001 1 0 . .. .... .... 0101 . . . 1 .... @3same_64_rev
VQRSHL_U_3s 1111 001 1 0 . .. .... .... 0101 . . . 1 .... @3same_rev
}
VMAX_S_3s 1111 001 0 0 . .. .... .... 0110 . . . 0 .... @3same
VMAX_U_3s 1111 001 1 0 . .. .... .... 0110 . . . 0 .... @3same
VMIN_S_3s 1111 001 0 0 . .. .... .... 0110 . . . 1 .... @3same
VMIN_U_3s 1111 001 1 0 . .. .... .... 0110 . . . 1 .... @3same
VABD_S_3s 1111 001 0 0 . .. .... .... 0111 . . . 0 .... @3same
VABD_U_3s 1111 001 1 0 . .. .... .... 0111 . . . 0 .... @3same
VABA_S_3s 1111 001 0 0 . .. .... .... 0111 . . . 1 .... @3same
VABA_U_3s 1111 001 1 0 . .. .... .... 0111 . . . 1 .... @3same
VADD_3s 1111 001 0 0 . .. .... .... 1000 . . . 0 .... @3same
VSUB_3s 1111 001 1 0 . .. .... .... 1000 . . . 0 .... @3same
VTST_3s 1111 001 0 0 . .. .... .... 1000 . . . 1 .... @3same
VCEQ_3s 1111 001 1 0 . .. .... .... 1000 . . . 1 .... @3same
VMLA_3s 1111 001 0 0 . .. .... .... 1001 . . . 0 .... @3same
VMLS_3s 1111 001 1 0 . .. .... .... 1001 . . . 0 .... @3same
VMUL_3s 1111 001 0 0 . .. .... .... 1001 . . . 1 .... @3same
VMUL_p_3s 1111 001 1 0 . .. .... .... 1001 . . . 1 .... @3same
VPMAX_S_3s 1111 001 0 0 . .. .... .... 1010 . . . 0 .... @3same_q0
VPMAX_U_3s 1111 001 1 0 . .. .... .... 1010 . . . 0 .... @3same_q0
VPMIN_S_3s 1111 001 0 0 . .. .... .... 1010 . . . 1 .... @3same_q0
VPMIN_U_3s 1111 001 1 0 . .. .... .... 1010 . . . 1 .... @3same_q0
VQDMULH_3s 1111 001 0 0 . .. .... .... 1011 . . . 0 .... @3same
VQRDMULH_3s 1111 001 1 0 . .. .... .... 1011 . . . 0 .... @3same
VPADD_3s 1111 001 0 0 . .. .... .... 1011 . . . 1 .... @3same_q0
VQRDMLAH_3s 1111 001 1 0 . .. .... .... 1011 ... 1 .... @3same
@3same_crypto .... .... .... .... .... .... .... .... \
&3same vm=%vm_dp vn=%vn_dp vd=%vd_dp size=0 q=1
SHA1C_3s 1111 001 0 0 . 00 .... .... 1100 . 1 . 0 .... @3same_crypto
SHA1P_3s 1111 001 0 0 . 01 .... .... 1100 . 1 . 0 .... @3same_crypto
SHA1M_3s 1111 001 0 0 . 10 .... .... 1100 . 1 . 0 .... @3same_crypto
SHA1SU0_3s 1111 001 0 0 . 11 .... .... 1100 . 1 . 0 .... @3same_crypto
SHA256H_3s 1111 001 1 0 . 00 .... .... 1100 . 1 . 0 .... @3same_crypto
SHA256H2_3s 1111 001 1 0 . 01 .... .... 1100 . 1 . 0 .... @3same_crypto
SHA256SU1_3s 1111 001 1 0 . 10 .... .... 1100 . 1 . 0 .... @3same_crypto
VFMA_fp_3s 1111 001 0 0 . 0 . .... .... 1100 ... 1 .... @3same_fp
VFMS_fp_3s 1111 001 0 0 . 1 . .... .... 1100 ... 1 .... @3same_fp
VQRDMLSH_3s 1111 001 1 0 . .. .... .... 1100 ... 1 .... @3same
VADD_fp_3s 1111 001 0 0 . 0 . .... .... 1101 ... 0 .... @3same_fp
VSUB_fp_3s 1111 001 0 0 . 1 . .... .... 1101 ... 0 .... @3same_fp
VPADD_fp_3s 1111 001 1 0 . 0 . .... .... 1101 ... 0 .... @3same_fp_q0
VABD_fp_3s 1111 001 1 0 . 1 . .... .... 1101 ... 0 .... @3same_fp
VMLA_fp_3s 1111 001 0 0 . 0 . .... .... 1101 ... 1 .... @3same_fp
VMLS_fp_3s 1111 001 0 0 . 1 . .... .... 1101 ... 1 .... @3same_fp
VMUL_fp_3s 1111 001 1 0 . 0 . .... .... 1101 ... 1 .... @3same_fp
VCEQ_fp_3s 1111 001 0 0 . 0 . .... .... 1110 ... 0 .... @3same_fp
VCGE_fp_3s 1111 001 1 0 . 0 . .... .... 1110 ... 0 .... @3same_fp
VACGE_fp_3s 1111 001 1 0 . 0 . .... .... 1110 ... 1 .... @3same_fp
VCGT_fp_3s 1111 001 1 0 . 1 . .... .... 1110 ... 0 .... @3same_fp
VACGT_fp_3s 1111 001 1 0 . 1 . .... .... 1110 ... 1 .... @3same_fp
VMAX_fp_3s 1111 001 0 0 . 0 . .... .... 1111 ... 0 .... @3same_fp
VMIN_fp_3s 1111 001 0 0 . 1 . .... .... 1111 ... 0 .... @3same_fp
VPMAX_fp_3s 1111 001 1 0 . 0 . .... .... 1111 ... 0 .... @3same_fp_q0
VPMIN_fp_3s 1111 001 1 0 . 1 . .... .... 1111 ... 0 .... @3same_fp_q0
VRECPS_fp_3s 1111 001 0 0 . 0 . .... .... 1111 ... 1 .... @3same_fp
VRSQRTS_fp_3s 1111 001 0 0 . 1 . .... .... 1111 ... 1 .... @3same_fp
VMAXNM_fp_3s 1111 001 1 0 . 0 . .... .... 1111 ... 1 .... @3same_fp
VMINNM_fp_3s 1111 001 1 0 . 1 . .... .... 1111 ... 1 .... @3same_fp

######################################################################
# 2-reg-and-shift grouping:
# 1111 001 U 1 D immH:3 immL:3 Vd:4 opc:4 L Q M 1 Vm:4
######################################################################
&2reg_shift vm vd q shift size
# Right shifts are encoded as N - shift, where N is the element size in bits.
%neon_rshift_i6 16:6 !function=rsub_64
%neon_rshift_i5 16:5 !function=rsub_32
%neon_rshift_i4 16:4 !function=rsub_16
%neon_rshift_i3 16:3 !function=rsub_8
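# Illustratively, the rsub_* extractors above only need to compute
# shift = N - raw_imm (shown standalone here; the real functions would
# also take the usual decodetree '!function' DisasContext argument):
#     static int rsub_64(int x) { return 64 - x; }
#     static int rsub_32(int x) { return 32 - x; }
#     static int rsub_16(int x) { return 16 - x; }
#     static int rsub_8(int x)  { return  8 - x; }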
@2reg_shr_d .... ... . . . ...... .... .... 1 q:1 . . .... \
&2reg_shift vm=%vm_dp vd=%vd_dp size=3 shift=%neon_rshift_i6
@2reg_shr_s .... ... . . . 1 ..... .... .... 0 q:1 . . .... \
&2reg_shift vm=%vm_dp vd=%vd_dp size=2 shift=%neon_rshift_i5
@2reg_shr_h .... ... . . . 01 .... .... .... 0 q:1 . . .... \
&2reg_shift vm=%vm_dp vd=%vd_dp size=1 shift=%neon_rshift_i4
@2reg_shr_b .... ... . . . 001 ... .... .... 0 q:1 . . .... \
&2reg_shift vm=%vm_dp vd=%vd_dp size=0 shift=%neon_rshift_i3
@2reg_shl_d .... ... . . . shift:6 .... .... 1 q:1 . . .... \
&2reg_shift vm=%vm_dp vd=%vd_dp size=3
@2reg_shl_s .... ... . . . 1 shift:5 .... .... 0 q:1 . . .... \
&2reg_shift vm=%vm_dp vd=%vd_dp size=2
@2reg_shl_h .... ... . . . 01 shift:4 .... .... 0 q:1 . . .... \
&2reg_shift vm=%vm_dp vd=%vd_dp size=1
@2reg_shl_b .... ... . . . 001 shift:3 .... .... 0 q:1 . . .... \
&2reg_shift vm=%vm_dp vd=%vd_dp size=0
# Narrowing right shifts: here the Q bit is part of the opcode decode
@2reg_shrn_d .... ... . . . 1 ..... .... .... 0 . . . .... \
&2reg_shift vm=%vm_dp vd=%vd_dp size=3 q=0 \
shift=%neon_rshift_i5
@2reg_shrn_s .... ... . . . 01 .... .... .... 0 . . . .... \
&2reg_shift vm=%vm_dp vd=%vd_dp size=2 q=0 \
shift=%neon_rshift_i4
@2reg_shrn_h .... ... . . . 001 ... .... .... 0 . . . .... \
&2reg_shift vm=%vm_dp vd=%vd_dp size=1 q=0 \
shift=%neon_rshift_i3
# Long left shifts: again Q is part of opcode decode
@2reg_shll_s .... ... . . . 1 shift:5 .... .... 0 . . . .... \
&2reg_shift vm=%vm_dp vd=%vd_dp size=2 q=0
@2reg_shll_h .... ... . . . 01 shift:4 .... .... 0 . . . .... \
&2reg_shift vm=%vm_dp vd=%vd_dp size=1 q=0
@2reg_shll_b .... ... . . . 001 shift:3 .... .... 0 . . . .... \
&2reg_shift vm=%vm_dp vd=%vd_dp size=0 q=0
@2reg_vcvt .... ... . . . 1 ..... .... .... . q:1 . . .... \
&2reg_shift vm=%vm_dp vd=%vd_dp size=2 shift=%neon_rshift_i5
@2reg_vcvt_f16 .... ... . . . 11 .... .... .... . q:1 . . .... \
&2reg_shift vm=%vm_dp vd=%vd_dp size=1 shift=%neon_rshift_i4
VSHR_S_2sh 1111 001 0 1 . ...... .... 0000 . . . 1 .... @2reg_shr_d
VSHR_S_2sh 1111 001 0 1 . ...... .... 0000 . . . 1 .... @2reg_shr_s
VSHR_S_2sh 1111 001 0 1 . ...... .... 0000 . . . 1 .... @2reg_shr_h
VSHR_S_2sh 1111 001 0 1 . ...... .... 0000 . . . 1 .... @2reg_shr_b
VSHR_U_2sh 1111 001 1 1 . ...... .... 0000 . . . 1 .... @2reg_shr_d
VSHR_U_2sh 1111 001 1 1 . ...... .... 0000 . . . 1 .... @2reg_shr_s
VSHR_U_2sh 1111 001 1 1 . ...... .... 0000 . . . 1 .... @2reg_shr_h
VSHR_U_2sh 1111 001 1 1 . ...... .... 0000 . . . 1 .... @2reg_shr_b
VSRA_S_2sh 1111 001 0 1 . ...... .... 0001 . . . 1 .... @2reg_shr_d
VSRA_S_2sh 1111 001 0 1 . ...... .... 0001 . . . 1 .... @2reg_shr_s
VSRA_S_2sh 1111 001 0 1 . ...... .... 0001 . . . 1 .... @2reg_shr_h
VSRA_S_2sh 1111 001 0 1 . ...... .... 0001 . . . 1 .... @2reg_shr_b
VSRA_U_2sh 1111 001 1 1 . ...... .... 0001 . . . 1 .... @2reg_shr_d
VSRA_U_2sh 1111 001 1 1 . ...... .... 0001 . . . 1 .... @2reg_shr_s
VSRA_U_2sh 1111 001 1 1 . ...... .... 0001 . . . 1 .... @2reg_shr_h
VSRA_U_2sh 1111 001 1 1 . ...... .... 0001 . . . 1 .... @2reg_shr_b
VRSHR_S_2sh 1111 001 0 1 . ...... .... 0010 . . . 1 .... @2reg_shr_d
VRSHR_S_2sh 1111 001 0 1 . ...... .... 0010 . . . 1 .... @2reg_shr_s
VRSHR_S_2sh 1111 001 0 1 . ...... .... 0010 . . . 1 .... @2reg_shr_h
VRSHR_S_2sh 1111 001 0 1 . ...... .... 0010 . . . 1 .... @2reg_shr_b
VRSHR_U_2sh 1111 001 1 1 . ...... .... 0010 . . . 1 .... @2reg_shr_d
VRSHR_U_2sh 1111 001 1 1 . ...... .... 0010 . . . 1 .... @2reg_shr_s
VRSHR_U_2sh 1111 001 1 1 . ...... .... 0010 . . . 1 .... @2reg_shr_h
VRSHR_U_2sh 1111 001 1 1 . ...... .... 0010 . . . 1 .... @2reg_shr_b
VRSRA_S_2sh 1111 001 0 1 . ...... .... 0011 . . . 1 .... @2reg_shr_d
VRSRA_S_2sh 1111 001 0 1 . ...... .... 0011 . . . 1 .... @2reg_shr_s
VRSRA_S_2sh 1111 001 0 1 . ...... .... 0011 . . . 1 .... @2reg_shr_h
VRSRA_S_2sh 1111 001 0 1 . ...... .... 0011 . . . 1 .... @2reg_shr_b
VRSRA_U_2sh 1111 001 1 1 . ...... .... 0011 . . . 1 .... @2reg_shr_d
VRSRA_U_2sh 1111 001 1 1 . ...... .... 0011 . . . 1 .... @2reg_shr_s
VRSRA_U_2sh 1111 001 1 1 . ...... .... 0011 . . . 1 .... @2reg_shr_h
VRSRA_U_2sh 1111 001 1 1 . ...... .... 0011 . . . 1 .... @2reg_shr_b
VSRI_2sh 1111 001 1 1 . ...... .... 0100 . . . 1 .... @2reg_shr_d
VSRI_2sh 1111 001 1 1 . ...... .... 0100 . . . 1 .... @2reg_shr_s
VSRI_2sh 1111 001 1 1 . ...... .... 0100 . . . 1 .... @2reg_shr_h
VSRI_2sh 1111 001 1 1 . ...... .... 0100 . . . 1 .... @2reg_shr_b
VSHL_2sh 1111 001 0 1 . ...... .... 0101 . . . 1 .... @2reg_shl_d
VSHL_2sh 1111 001 0 1 . ...... .... 0101 . . . 1 .... @2reg_shl_s
VSHL_2sh 1111 001 0 1 . ...... .... 0101 . . . 1 .... @2reg_shl_h
VSHL_2sh 1111 001 0 1 . ...... .... 0101 . . . 1 .... @2reg_shl_b
VSLI_2sh 1111 001 1 1 . ...... .... 0101 . . . 1 .... @2reg_shl_d
VSLI_2sh 1111 001 1 1 . ...... .... 0101 . . . 1 .... @2reg_shl_s
VSLI_2sh 1111 001 1 1 . ...... .... 0101 . . . 1 .... @2reg_shl_h
VSLI_2sh 1111 001 1 1 . ...... .... 0101 . . . 1 .... @2reg_shl_b
VQSHLU_64_2sh 1111 001 1 1 . ...... .... 0110 . . . 1 .... @2reg_shl_d
VQSHLU_2sh 1111 001 1 1 . ...... .... 0110 . . . 1 .... @2reg_shl_s
VQSHLU_2sh 1111 001 1 1 . ...... .... 0110 . . . 1 .... @2reg_shl_h
VQSHLU_2sh 1111 001 1 1 . ...... .... 0110 . . . 1 .... @2reg_shl_b
VQSHL_S_64_2sh 1111 001 0 1 . ...... .... 0111 . . . 1 .... @2reg_shl_d
VQSHL_S_2sh 1111 001 0 1 . ...... .... 0111 . . . 1 .... @2reg_shl_s
VQSHL_S_2sh 1111 001 0 1 . ...... .... 0111 . . . 1 .... @2reg_shl_h
VQSHL_S_2sh 1111 001 0 1 . ...... .... 0111 . . . 1 .... @2reg_shl_b
VQSHL_U_64_2sh 1111 001 1 1 . ...... .... 0111 . . . 1 .... @2reg_shl_d
VQSHL_U_2sh 1111 001 1 1 . ...... .... 0111 . . . 1 .... @2reg_shl_s
VQSHL_U_2sh 1111 001 1 1 . ...... .... 0111 . . . 1 .... @2reg_shl_h
VQSHL_U_2sh 1111 001 1 1 . ...... .... 0111 . . . 1 .... @2reg_shl_b
VSHRN_64_2sh 1111 001 0 1 . ...... .... 1000 . 0 . 1 .... @2reg_shrn_d
VSHRN_32_2sh 1111 001 0 1 . ...... .... 1000 . 0 . 1 .... @2reg_shrn_s
VSHRN_16_2sh 1111 001 0 1 . ...... .... 1000 . 0 . 1 .... @2reg_shrn_h
VRSHRN_64_2sh 1111 001 0 1 . ...... .... 1000 . 1 . 1 .... @2reg_shrn_d
VRSHRN_32_2sh 1111 001 0 1 . ...... .... 1000 . 1 . 1 .... @2reg_shrn_s
VRSHRN_16_2sh 1111 001 0 1 . ...... .... 1000 . 1 . 1 .... @2reg_shrn_h
VQSHRUN_64_2sh 1111 001 1 1 . ...... .... 1000 . 0 . 1 .... @2reg_shrn_d
VQSHRUN_32_2sh 1111 001 1 1 . ...... .... 1000 . 0 . 1 .... @2reg_shrn_s
VQSHRUN_16_2sh 1111 001 1 1 . ...... .... 1000 . 0 . 1 .... @2reg_shrn_h
VQRSHRUN_64_2sh 1111 001 1 1 . ...... .... 1000 . 1 . 1 .... @2reg_shrn_d
VQRSHRUN_32_2sh 1111 001 1 1 . ...... .... 1000 . 1 . 1 .... @2reg_shrn_s
VQRSHRUN_16_2sh 1111 001 1 1 . ...... .... 1000 . 1 . 1 .... @2reg_shrn_h
# VQSHRN with signed input
VQSHRN_S64_2sh 1111 001 0 1 . ...... .... 1001 . 0 . 1 .... @2reg_shrn_d
VQSHRN_S32_2sh 1111 001 0 1 . ...... .... 1001 . 0 . 1 .... @2reg_shrn_s
VQSHRN_S16_2sh 1111 001 0 1 . ...... .... 1001 . 0 . 1 .... @2reg_shrn_h
# VQRSHRN with signed input
VQRSHRN_S64_2sh 1111 001 0 1 . ...... .... 1001 . 1 . 1 .... @2reg_shrn_d
VQRSHRN_S32_2sh 1111 001 0 1 . ...... .... 1001 . 1 . 1 .... @2reg_shrn_s
VQRSHRN_S16_2sh 1111 001 0 1 . ...... .... 1001 . 1 . 1 .... @2reg_shrn_h
# VQSHRN with unsigned input
VQSHRN_U64_2sh 1111 001 1 1 . ...... .... 1001 . 0 . 1 .... @2reg_shrn_d
VQSHRN_U32_2sh 1111 001 1 1 . ...... .... 1001 . 0 . 1 .... @2reg_shrn_s
VQSHRN_U16_2sh 1111 001 1 1 . ...... .... 1001 . 0 . 1 .... @2reg_shrn_h
# VQRSHRN with unsigned input
VQRSHRN_U64_2sh 1111 001 1 1 . ...... .... 1001 . 1 . 1 .... @2reg_shrn_d
VQRSHRN_U32_2sh 1111 001 1 1 . ...... .... 1001 . 1 . 1 .... @2reg_shrn_s
VQRSHRN_U16_2sh 1111 001 1 1 . ...... .... 1001 . 1 . 1 .... @2reg_shrn_h
VSHLL_S_2sh 1111 001 0 1 . ...... .... 1010 . 0 . 1 .... @2reg_shll_s
VSHLL_S_2sh 1111 001 0 1 . ...... .... 1010 . 0 . 1 .... @2reg_shll_h
VSHLL_S_2sh 1111 001 0 1 . ...... .... 1010 . 0 . 1 .... @2reg_shll_b
VSHLL_U_2sh 1111 001 1 1 . ...... .... 1010 . 0 . 1 .... @2reg_shll_s
VSHLL_U_2sh 1111 001 1 1 . ...... .... 1010 . 0 . 1 .... @2reg_shll_h
VSHLL_U_2sh 1111 001 1 1 . ...... .... 1010 . 0 . 1 .... @2reg_shll_b
# VCVT fixed<->float conversions
VCVT_SH_2sh 1111 001 0 1 . ...... .... 1100 0 . . 1 .... @2reg_vcvt_f16
VCVT_UH_2sh 1111 001 1 1 . ...... .... 1100 0 . . 1 .... @2reg_vcvt_f16
VCVT_HS_2sh 1111 001 0 1 . ...... .... 1101 0 . . 1 .... @2reg_vcvt_f16
VCVT_HU_2sh 1111 001 1 1 . ...... .... 1101 0 . . 1 .... @2reg_vcvt_f16
VCVT_SF_2sh 1111 001 0 1 . ...... .... 1110 0 . . 1 .... @2reg_vcvt
VCVT_UF_2sh 1111 001 1 1 . ...... .... 1110 0 . . 1 .... @2reg_vcvt
VCVT_FS_2sh 1111 001 0 1 . ...... .... 1111 0 . . 1 .... @2reg_vcvt
VCVT_FU_2sh 1111 001 1 1 . ...... .... 1111 0 . . 1 .... @2reg_vcvt

######################################################################
# 1-reg-and-modified-immediate grouping:
# 1111 001 i 1 D 000 imm:3 Vd:4 cmode:4 0 Q op 1 Vm:4
######################################################################
&1reg_imm vd q imm cmode op
%asimd_imm_value 24:1 16:3 0:4
@1reg_imm .... ... . . . ... ... .... .... . q:1 . . .... \
&1reg_imm imm=%asimd_imm_value vd=%vd_dp
# The cmode/op bits here decode VORR/VBIC/VMOV/VMVN, but
# not in a way we can conveniently represent in decodetree without
# a lot of repetition:
# VORR: op=0, (cmode & 1) && cmode < 12
# VBIC: op=1, (cmode & 1) && cmode < 12
# VMOV: everything else
# So we have a single decode line and check the cmode/op in the
# trans function.
Vimm_1r 1111 001 . 1 . 000 ... .... cmode:4 0 . op:1 1 .... @1reg_imm
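# For illustration: %asimd_imm_value assembles the 8-bit immediate as
# (i << 7) | (imm3 << 4) | imm4 from bits [24], [18:16] and [3:0], and
# the trans function can classify the encoding along these lines
# (a sketch, not the actual QEMU code):
#     if ((a->cmode & 1) && a->cmode < 12) {
#         /* a->op == 0: VORR-style;  a->op == 1: VBIC-style */
#     } else {
#         /* VMOV / VMVN immediate forms */
#     }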

######################################################################
# Within the "two registers, or three registers of different lengths"
# grouping ([23,4]=0b10), bits [21:20] are either part of the opcode
# decode: 0b11 for VEXT, two-reg-misc, VTBL, and duplicate-scalar;
# or they are a size field for the three-reg-different-lengths and
# two-reg-and-scalar insn groups (where size cannot be 0b11). This
# is slightly awkward for decodetree: we handle it with this
# non-exclusive group which contains within it two exclusive groups:
# one for the size=0b11 patterns, and one for the size-not-0b11
# patterns. This allows us to check that none of the insns within
# each subgroup accidentally overlap each other. Note that all the
# trans functions for the size-not-0b11 patterns must check and
# return false for size==3.
######################################################################
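# A sketch of the size==3 guard described above (the function and arg
# type names follow decodetree's trans_<NAME>/arg_<argset> generation;
# the body is illustrative, not QEMU's actual implementation):
#     static bool trans_VADDL_S_3d(DisasContext *s, arg_3diff *a)
#     {
#         if (a->size == 3) {
#             /* size==0b11: not ours; decode fails and the insn UNDEFs */
#             return false;
#         }
#         /* ... translate the size 0/1/2 cases ... */
#         return true;
#     }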
{
[
##################################################################
# Miscellaneous size=0b11 insns
##################################################################
VEXT 1111 001 0 1 . 11 .... .... imm:4 . q:1 . 0 .... \
vm=%vm_dp vn=%vn_dp vd=%vd_dp
VTBL 1111 001 1 1 . 11 .... .... 10 len:2 . op:1 . 0 .... \
vm=%vm_dp vn=%vn_dp vd=%vd_dp
VDUP_scalar 1111 001 1 1 . 11 index:3 1 .... 11 000 q:1 . 0 .... \
vm=%vm_dp vd=%vd_dp size=0
VDUP_scalar 1111 001 1 1 . 11 index:2 10 .... 11 000 q:1 . 0 .... \
vm=%vm_dp vd=%vd_dp size=1
VDUP_scalar 1111 001 1 1 . 11 index:1 100 .... 11 000 q:1 . 0 .... \
vm=%vm_dp vd=%vd_dp size=2
##################################################################
# 2-reg-misc grouping:
# 1111 001 11 D 11 size:2 opc1:2 Vd:4 0 opc2:4 q:1 M 0 Vm:4
##################################################################
&2misc vd vm q size
@2misc .... ... .. . .. size:2 .. .... . .... q:1 . . .... \
&2misc vm=%vm_dp vd=%vd_dp
@2misc_q0 .... ... .. . .. size:2 .. .... . .... . . . .... \
&2misc vm=%vm_dp vd=%vd_dp q=0
@2misc_q1 .... ... .. . .. size:2 .. .... . .... . . . .... \
&2misc vm=%vm_dp vd=%vd_dp q=1
VREV64 1111 001 11 . 11 .. 00 .... 0 0000 . . 0 .... @2misc
VREV32 1111 001 11 . 11 .. 00 .... 0 0001 . . 0 .... @2misc
VREV16 1111 001 11 . 11 .. 00 .... 0 0010 . . 0 .... @2misc
VPADDL_S 1111 001 11 . 11 .. 00 .... 0 0100 . . 0 .... @2misc
VPADDL_U 1111 001 11 . 11 .. 00 .... 0 0101 . . 0 .... @2misc
AESE 1111 001 11 . 11 .. 00 .... 0 0110 0 . 0 .... @2misc_q1
AESD 1111 001 11 . 11 .. 00 .... 0 0110 1 . 0 .... @2misc_q1
AESMC 1111 001 11 . 11 .. 00 .... 0 0111 0 . 0 .... @2misc_q1
AESIMC 1111 001 11 . 11 .. 00 .... 0 0111 1 . 0 .... @2misc_q1
VCLS 1111 001 11 . 11 .. 00 .... 0 1000 . . 0 .... @2misc
VCLZ 1111 001 11 . 11 .. 00 .... 0 1001 . . 0 .... @2misc
VCNT 1111 001 11 . 11 .. 00 .... 0 1010 . . 0 .... @2misc
VMVN 1111 001 11 . 11 .. 00 .... 0 1011 . . 0 .... @2misc
VPADAL_S 1111 001 11 . 11 .. 00 .... 0 1100 . . 0 .... @2misc
VPADAL_U 1111 001 11 . 11 .. 00 .... 0 1101 . . 0 .... @2misc
VQABS 1111 001 11 . 11 .. 00 .... 0 1110 . . 0 .... @2misc
VQNEG 1111 001 11 . 11 .. 00 .... 0 1111 . . 0 .... @2misc
VCGT0 1111 001 11 . 11 .. 01 .... 0 0000 . . 0 .... @2misc
VCGE0 1111 001 11 . 11 .. 01 .... 0 0001 . . 0 .... @2misc
VCEQ0 1111 001 11 . 11 .. 01 .... 0 0010 . . 0 .... @2misc
VCLE0 1111 001 11 . 11 .. 01 .... 0 0011 . . 0 .... @2misc
VCLT0 1111 001 11 . 11 .. 01 .... 0 0100 . . 0 .... @2misc
SHA1H 1111 001 11 . 11 .. 01 .... 0 0101 1 . 0 .... @2misc_q1
VABS 1111 001 11 . 11 .. 01 .... 0 0110 . . 0 .... @2misc
VNEG 1111 001 11 . 11 .. 01 .... 0 0111 . . 0 .... @2misc
VCGT0_F 1111 001 11 . 11 .. 01 .... 0 1000 . . 0 .... @2misc
VCGE0_F 1111 001 11 . 11 .. 01 .... 0 1001 . . 0 .... @2misc
VCEQ0_F 1111 001 11 . 11 .. 01 .... 0 1010 . . 0 .... @2misc
VCLE0_F 1111 001 11 . 11 .. 01 .... 0 1011 . . 0 .... @2misc
VCLT0_F 1111 001 11 . 11 .. 01 .... 0 1100 . . 0 .... @2misc
VABS_F 1111 001 11 . 11 .. 01 .... 0 1110 . . 0 .... @2misc
VNEG_F 1111 001 11 . 11 .. 01 .... 0 1111 . . 0 .... @2misc
VSWP 1111 001 11 . 11 .. 10 .... 0 0000 . . 0 .... @2misc
VTRN 1111 001 11 . 11 .. 10 .... 0 0001 . . 0 .... @2misc
VUZP 1111 001 11 . 11 .. 10 .... 0 0010 . . 0 .... @2misc
VZIP 1111 001 11 . 11 .. 10 .... 0 0011 . . 0 .... @2misc
VMOVN 1111 001 11 . 11 .. 10 .... 0 0100 0 . 0 .... @2misc_q0
# VQMOVUN: unsigned result (source is always signed)
VQMOVUN 1111 001 11 . 11 .. 10 .... 0 0100 1 . 0 .... @2misc_q0
# VQMOVN: signed result, source may be signed (_S) or unsigned (_U)
VQMOVN_S 1111 001 11 . 11 .. 10 .... 0 0101 0 . 0 .... @2misc_q0
VQMOVN_U 1111 001 11 . 11 .. 10 .... 0 0101 1 . 0 .... @2misc_q0
VSHLL 1111 001 11 . 11 .. 10 .... 0 0110 0 . 0 .... @2misc_q0
SHA1SU1 1111 001 11 . 11 .. 10 .... 0 0111 0 . 0 .... @2misc_q1
SHA256SU0 1111 001 11 . 11 .. 10 .... 0 0111 1 . 0 .... @2misc_q1
VRINTN 1111 001 11 . 11 .. 10 .... 0 1000 . . 0 .... @2misc
VRINTX 1111 001 11 . 11 .. 10 .... 0 1001 . . 0 .... @2misc
VRINTA 1111 001 11 . 11 .. 10 .... 0 1010 . . 0 .... @2misc
VRINTZ 1111 001 11 . 11 .. 10 .... 0 1011 . . 0 .... @2misc
VCVT_F16_F32 1111 001 11 . 11 .. 10 .... 0 1100 0 . 0 .... @2misc_q0
VCVT_B16_F32 1111 001 11 . 11 .. 10 .... 0 1100 1 . 0 .... @2misc_q0
VRINTM 1111 001 11 . 11 .. 10 .... 0 1101 . . 0 .... @2misc
VCVT_F32_F16 1111 001 11 . 11 .. 10 .... 0 1110 0 . 0 .... @2misc_q0
VRINTP 1111 001 11 . 11 .. 10 .... 0 1111 . . 0 .... @2misc
VCVTAS 1111 001 11 . 11 .. 11 .... 0 0000 . . 0 .... @2misc
VCVTAU 1111 001 11 . 11 .. 11 .... 0 0001 . . 0 .... @2misc
VCVTNS 1111 001 11 . 11 .. 11 .... 0 0010 . . 0 .... @2misc
VCVTNU 1111 001 11 . 11 .. 11 .... 0 0011 . . 0 .... @2misc
VCVTPS 1111 001 11 . 11 .. 11 .... 0 0100 . . 0 .... @2misc
VCVTPU 1111 001 11 . 11 .. 11 .... 0 0101 . . 0 .... @2misc
VCVTMS 1111 001 11 . 11 .. 11 .... 0 0110 . . 0 .... @2misc
VCVTMU 1111 001 11 . 11 .. 11 .... 0 0111 . . 0 .... @2misc
VRECPE 1111 001 11 . 11 .. 11 .... 0 1000 . . 0 .... @2misc
VRSQRTE 1111 001 11 . 11 .. 11 .... 0 1001 . . 0 .... @2misc
VRECPE_F 1111 001 11 . 11 .. 11 .... 0 1010 . . 0 .... @2misc
VRSQRTE_F 1111 001 11 . 11 .. 11 .... 0 1011 . . 0 .... @2misc
VCVT_FS 1111 001 11 . 11 .. 11 .... 0 1100 . . 0 .... @2misc
VCVT_FU 1111 001 11 . 11 .. 11 .... 0 1101 . . 0 .... @2misc
VCVT_SF 1111 001 11 . 11 .. 11 .... 0 1110 . . 0 .... @2misc
VCVT_UF 1111 001 11 . 11 .. 11 .... 0 1111 . . 0 .... @2misc
]
# Subgroup for size != 0b11
[
##################################################################
# 3-reg-different-length grouping:
# 1111 001 U 1 D sz!=11 Vn:4 Vd:4 opc:4 N 0 M 0 Vm:4
##################################################################
&3diff vm vn vd size
@3diff .... ... . . . size:2 .... .... .... . . . . .... \
&3diff vm=%vm_dp vn=%vn_dp vd=%vd_dp
VADDL_S_3d 1111 001 0 1 . .. .... .... 0000 . 0 . 0 .... @3diff
VADDL_U_3d 1111 001 1 1 . .. .... .... 0000 . 0 . 0 .... @3diff
VADDW_S_3d 1111 001 0 1 . .. .... .... 0001 . 0 . 0 .... @3diff
VADDW_U_3d 1111 001 1 1 . .. .... .... 0001 . 0 . 0 .... @3diff
VSUBL_S_3d 1111 001 0 1 . .. .... .... 0010 . 0 . 0 .... @3diff
VSUBL_U_3d 1111 001 1 1 . .. .... .... 0010 . 0 . 0 .... @3diff
VSUBW_S_3d 1111 001 0 1 . .. .... .... 0011 . 0 . 0 .... @3diff
VSUBW_U_3d 1111 001 1 1 . .. .... .... 0011 . 0 . 0 .... @3diff
VADDHN_3d 1111 001 0 1 . .. .... .... 0100 . 0 . 0 .... @3diff
VRADDHN_3d 1111 001 1 1 . .. .... .... 0100 . 0 . 0 .... @3diff
VABAL_S_3d 1111 001 0 1 . .. .... .... 0101 . 0 . 0 .... @3diff
VABAL_U_3d 1111 001 1 1 . .. .... .... 0101 . 0 . 0 .... @3diff
VSUBHN_3d 1111 001 0 1 . .. .... .... 0110 . 0 . 0 .... @3diff
VRSUBHN_3d 1111 001 1 1 . .. .... .... 0110 . 0 . 0 .... @3diff
VABDL_S_3d 1111 001 0 1 . .. .... .... 0111 . 0 . 0 .... @3diff
VABDL_U_3d 1111 001 1 1 . .. .... .... 0111 . 0 . 0 .... @3diff
VMLAL_S_3d 1111 001 0 1 . .. .... .... 1000 . 0 . 0 .... @3diff
VMLAL_U_3d 1111 001 1 1 . .. .... .... 1000 . 0 . 0 .... @3diff
VQDMLAL_3d 1111 001 0 1 . .. .... .... 1001 . 0 . 0 .... @3diff
VMLSL_S_3d 1111 001 0 1 . .. .... .... 1010 . 0 . 0 .... @3diff
VMLSL_U_3d 1111 001 1 1 . .. .... .... 1010 . 0 . 0 .... @3diff
VQDMLSL_3d 1111 001 0 1 . .. .... .... 1011 . 0 . 0 .... @3diff
VMULL_S_3d 1111 001 0 1 . .. .... .... 1100 . 0 . 0 .... @3diff
VMULL_U_3d 1111 001 1 1 . .. .... .... 1100 . 0 . 0 .... @3diff
VQDMULL_3d 1111 001 0 1 . .. .... .... 1101 . 0 . 0 .... @3diff
VMULL_P_3d 1111 001 0 1 . .. .... .... 1110 . 0 . 0 .... @3diff
##################################################################
# 2-regs-plus-scalar grouping:
# 1111 001 Q 1 D sz!=11 Vn:4 Vd:4 opc:4 N 1 M 0 Vm:4
##################################################################
&2scalar vm vn vd size q
@2scalar .... ... q:1 . . size:2 .... .... .... . . . . .... \
&2scalar vm=%vm_dp vn=%vn_dp vd=%vd_dp
# For the 'long' ops the Q bit is part of insn decode
@2scalar_q0 .... ... . . . size:2 .... .... .... . . . . .... \
&2scalar vm=%vm_dp vn=%vn_dp vd=%vd_dp q=0
VMLA_2sc 1111 001 . 1 . .. .... .... 0000 . 1 . 0 .... @2scalar
VMLA_F_2sc 1111 001 . 1 . .. .... .... 0001 . 1 . 0 .... @2scalar
VMLAL_S_2sc 1111 001 0 1 . .. .... .... 0010 . 1 . 0 .... @2scalar_q0
VMLAL_U_2sc 1111 001 1 1 . .. .... .... 0010 . 1 . 0 .... @2scalar_q0
VQDMLAL_2sc 1111 001 0 1 . .. .... .... 0011 . 1 . 0 .... @2scalar_q0
VMLS_2sc 1111 001 . 1 . .. .... .... 0100 . 1 . 0 .... @2scalar
VMLS_F_2sc 1111 001 . 1 . .. .... .... 0101 . 1 . 0 .... @2scalar
VMLSL_S_2sc 1111 001 0 1 . .. .... .... 0110 . 1 . 0 .... @2scalar_q0
VMLSL_U_2sc 1111 001 1 1 . .. .... .... 0110 . 1 . 0 .... @2scalar_q0
VQDMLSL_2sc 1111 001 0 1 . .. .... .... 0111 . 1 . 0 .... @2scalar_q0
VMUL_2sc 1111 001 . 1 . .. .... .... 1000 . 1 . 0 .... @2scalar
VMUL_F_2sc 1111 001 . 1 . .. .... .... 1001 . 1 . 0 .... @2scalar
VMULL_S_2sc 1111 001 0 1 . .. .... .... 1010 . 1 . 0 .... @2scalar_q0
VMULL_U_2sc 1111 001 1 1 . .. .... .... 1010 . 1 . 0 .... @2scalar_q0
VQDMULL_2sc 1111 001 0 1 . .. .... .... 1011 . 1 . 0 .... @2scalar_q0
VQDMULH_2sc 1111 001 . 1 . .. .... .... 1100 . 1 . 0 .... @2scalar
VQRDMULH_2sc 1111 001 . 1 . .. .... .... 1101 . 1 . 0 .... @2scalar
VQRDMLAH_2sc 1111 001 . 1 . .. .... .... 1110 . 1 . 0 .... @2scalar
VQRDMLSH_2sc 1111 001 . 1 . .. .... .... 1111 . 1 . 0 .... @2scalar
]
}