; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f | FileCheck %s --check-prefixes=AVX512,NOBW,NOVBMI,AVX512F
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw | FileCheck %s --check-prefixes=AVX512,NOVBMI,AVX512BW
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw,+avx512vbmi | FileCheck %s --check-prefixes=AVX512,AVX512BW,VBMI
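
; Variable shuffle of <8 x i64>: the IR builds the result with a chain of
; extractelement/insertelement using run-time indices from %indices. The
; backend is expected to recognize the pattern and emit a single variable
; cross-lane permute (vpermpd) on all AVX512 targets, as checked below.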
define <8 x i64> @var_shuffle_v8i64(<8 x i64> %v, <8 x i64> %indices) nounwind {
; AVX512-LABEL: var_shuffle_v8i64:
; AVX512: # %bb.0:
; AVX512-NEXT: vpermpd %zmm0, %zmm1, %zmm0
; AVX512-NEXT: retq
%index0 = extractelement <8 x i64> %indices, i32 0
%index1 = extractelement <8 x i64> %indices, i32 1
%index2 = extractelement <8 x i64> %indices, i32 2
%index3 = extractelement <8 x i64> %indices, i32 3
%index4 = extractelement <8 x i64> %indices, i32 4
%index5 = extractelement <8 x i64> %indices, i32 5
%index6 = extractelement <8 x i64> %indices, i32 6
%index7 = extractelement <8 x i64> %indices, i32 7
%v0 = extractelement <8 x i64> %v, i64 %index0
%v1 = extractelement <8 x i64> %v, i64 %index1
%v2 = extractelement <8 x i64> %v, i64 %index2
%v3 = extractelement <8 x i64> %v, i64 %index3
%v4 = extractelement <8 x i64> %v, i64 %index4
%v5 = extractelement <8 x i64> %v, i64 %index5
%v6 = extractelement <8 x i64> %v, i64 %index6
%v7 = extractelement <8 x i64> %v, i64 %index7
%ret0 = insertelement <8 x i64> undef, i64 %v0, i32 0
%ret1 = insertelement <8 x i64> %ret0, i64 %v1, i32 1
%ret2 = insertelement <8 x i64> %ret1, i64 %v2, i32 2
%ret3 = insertelement <8 x i64> %ret2, i64 %v3, i32 3
%ret4 = insertelement <8 x i64> %ret3, i64 %v4, i32 4
%ret5 = insertelement <8 x i64> %ret4, i64 %v5, i32 5
%ret6 = insertelement <8 x i64> %ret5, i64 %v6, i32 6
%ret7 = insertelement <8 x i64> %ret6, i64 %v7, i32 7
ret <8 x i64> %ret7
}
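
; Same pattern for <16 x i32>: the scalarized extract/insert chain should be
; matched to a single vpermps on all AVX512 targets.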
define <16 x i32> @var_shuffle_v16i32(<16 x i32> %v, <16 x i32> %indices) nounwind {
; AVX512-LABEL: var_shuffle_v16i32:
; AVX512: # %bb.0:
; AVX512-NEXT: vpermps %zmm0, %zmm1, %zmm0
; AVX512-NEXT: retq
%index0 = extractelement <16 x i32> %indices, i32 0
%index1 = extractelement <16 x i32> %indices, i32 1
%index2 = extractelement <16 x i32> %indices, i32 2
%index3 = extractelement <16 x i32> %indices, i32 3
%index4 = extractelement <16 x i32> %indices, i32 4
%index5 = extractelement <16 x i32> %indices, i32 5
%index6 = extractelement <16 x i32> %indices, i32 6
%index7 = extractelement <16 x i32> %indices, i32 7
%index8 = extractelement <16 x i32> %indices, i32 8
%index9 = extractelement <16 x i32> %indices, i32 9
%index10 = extractelement <16 x i32> %indices, i32 10
%index11 = extractelement <16 x i32> %indices, i32 11
%index12 = extractelement <16 x i32> %indices, i32 12
%index13 = extractelement <16 x i32> %indices, i32 13
%index14 = extractelement <16 x i32> %indices, i32 14
%index15 = extractelement <16 x i32> %indices, i32 15
%v0 = extractelement <16 x i32> %v, i32 %index0
%v1 = extractelement <16 x i32> %v, i32 %index1
%v2 = extractelement <16 x i32> %v, i32 %index2
%v3 = extractelement <16 x i32> %v, i32 %index3
%v4 = extractelement <16 x i32> %v, i32 %index4
%v5 = extractelement <16 x i32> %v, i32 %index5
%v6 = extractelement <16 x i32> %v, i32 %index6
%v7 = extractelement <16 x i32> %v, i32 %index7
%v8 = extractelement <16 x i32> %v, i32 %index8
%v9 = extractelement <16 x i32> %v, i32 %index9
%v10 = extractelement <16 x i32> %v, i32 %index10
%v11 = extractelement <16 x i32> %v, i32 %index11
%v12 = extractelement <16 x i32> %v, i32 %index12
%v13 = extractelement <16 x i32> %v, i32 %index13
%v14 = extractelement <16 x i32> %v, i32 %index14
%v15 = extractelement <16 x i32> %v, i32 %index15
%ret0 = insertelement <16 x i32> undef, i32 %v0, i32 0
%ret1 = insertelement <16 x i32> %ret0, i32 %v1, i32 1
%ret2 = insertelement <16 x i32> %ret1, i32 %v2, i32 2
%ret3 = insertelement <16 x i32> %ret2, i32 %v3, i32 3
%ret4 = insertelement <16 x i32> %ret3, i32 %v4, i32 4
%ret5 = insertelement <16 x i32> %ret4, i32 %v5, i32 5
%ret6 = insertelement <16 x i32> %ret5, i32 %v6, i32 6
%ret7 = insertelement <16 x i32> %ret6, i32 %v7, i32 7
%ret8 = insertelement <16 x i32> %ret7, i32 %v8, i32 8
%ret9 = insertelement <16 x i32> %ret8, i32 %v9, i32 9
%ret10 = insertelement <16 x i32> %ret9, i32 %v10, i32 10
%ret11 = insertelement <16 x i32> %ret10, i32 %v11, i32 11
%ret12 = insertelement <16 x i32> %ret11, i32 %v12, i32 12
%ret13 = insertelement <16 x i32> %ret12, i32 %v13, i32 13
%ret14 = insertelement <16 x i32> %ret13, i32 %v14, i32 14
%ret15 = insertelement <16 x i32> %ret14, i32 %v15, i32 15
ret <16 x i32> %ret15
}
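
; <32 x i16> needs a variable word permute (vpermw), which requires AVX512BW.
; Without it (NOBW) the expected code spills the source vector to the stack
; and rebuilds the result word by word with vpextrw/vpinsrw; with AVX512BW a
; single vpermw is expected.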
define <32 x i16> @var_shuffle_v32i16(<32 x i16> %v, <32 x i16> %indices) nounwind {
; NOBW-LABEL: var_shuffle_v32i16:
; NOBW: # %bb.0:
; NOBW-NEXT: pushq %rbp
; NOBW-NEXT: movq %rsp, %rbp
; NOBW-NEXT: andq $-64, %rsp
; NOBW-NEXT: subq $2112, %rsp # imm = 0x840
; NOBW-NEXT: vextracti64x4 $1, %zmm1, %ymm2
; NOBW-NEXT: vextracti128 $1, %ymm1, %xmm3
; NOBW-NEXT: vextracti128 $1, %ymm2, %xmm4
; NOBW-NEXT: vmovd %xmm4, %eax
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, (%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: movzwl 1536(%rsp,%rax,2), %eax
; NOBW-NEXT: vmovd %eax, %xmm0
; NOBW-NEXT: vpextrw $1, %xmm4, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $1, 1600(%rsp,%rax,2), %xmm0, %xmm0
; NOBW-NEXT: vpextrw $2, %xmm4, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $2, 1664(%rsp,%rax,2), %xmm0, %xmm0
; NOBW-NEXT: vpextrw $3, %xmm4, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $3, 1728(%rsp,%rax,2), %xmm0, %xmm0
; NOBW-NEXT: vpextrw $4, %xmm4, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $4, 1792(%rsp,%rax,2), %xmm0, %xmm0
; NOBW-NEXT: vpextrw $5, %xmm4, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $5, 1856(%rsp,%rax,2), %xmm0, %xmm0
; NOBW-NEXT: vpextrw $6, %xmm4, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $6, 1920(%rsp,%rax,2), %xmm0, %xmm0
; NOBW-NEXT: vpextrw $7, %xmm4, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $7, 1984(%rsp,%rax,2), %xmm0, %xmm0
; NOBW-NEXT: vmovd %xmm2, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: movzwl 1024(%rsp,%rax,2), %eax
; NOBW-NEXT: vmovd %eax, %xmm4
; NOBW-NEXT: vpextrw $1, %xmm2, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $1, 1088(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $2, %xmm2, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $2, 1152(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $3, %xmm2, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $3, 1216(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $4, %xmm2, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $4, 1280(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $5, %xmm2, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $5, 1344(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $6, %xmm2, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $6, 1408(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $7, %xmm2, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $7, 1472(%rsp,%rax,2), %xmm4, %xmm2
; NOBW-NEXT: vmovd %xmm3, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: movzwl 512(%rsp,%rax,2), %eax
; NOBW-NEXT: vmovd %eax, %xmm4
; NOBW-NEXT: vpextrw $1, %xmm3, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $1, 576(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $2, %xmm3, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $2, 640(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $3, %xmm3, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $3, 704(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $4, %xmm3, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $4, 768(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $5, %xmm3, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $5, 832(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $6, %xmm3, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $6, 896(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $7, %xmm3, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $7, 960(%rsp,%rax,2), %xmm4, %xmm3
; NOBW-NEXT: vmovd %xmm1, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: movzwl (%rsp,%rax,2), %eax
; NOBW-NEXT: vmovd %eax, %xmm4
; NOBW-NEXT: vpextrw $1, %xmm1, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $1, 64(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $2, %xmm1, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $2, 128(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $3, %xmm1, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $3, 192(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $4, %xmm1, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $4, 256(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $5, %xmm1, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $5, 320(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $6, %xmm1, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $6, 384(%rsp,%rax,2), %xmm4, %xmm4
; NOBW-NEXT: vpextrw $7, %xmm1, %eax
; NOBW-NEXT: andl $31, %eax
; NOBW-NEXT: vpinsrw $7, 448(%rsp,%rax,2), %xmm4, %xmm1
; NOBW-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
; NOBW-NEXT: vinserti128 $1, %xmm3, %ymm1, %ymm1
; NOBW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
; NOBW-NEXT: movq %rbp, %rsp
; NOBW-NEXT: popq %rbp
; NOBW-NEXT: retq
;
; AVX512BW-LABEL: var_shuffle_v32i16:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: vpermw %zmm0, %zmm1, %zmm0
; AVX512BW-NEXT: retq
%index0 = extractelement <32 x i16> %indices, i32 0
%index1 = extractelement <32 x i16> %indices, i32 1
%index2 = extractelement <32 x i16> %indices, i32 2
%index3 = extractelement <32 x i16> %indices, i32 3
%index4 = extractelement <32 x i16> %indices, i32 4
%index5 = extractelement <32 x i16> %indices, i32 5
%index6 = extractelement <32 x i16> %indices, i32 6
%index7 = extractelement <32 x i16> %indices, i32 7
%index8 = extractelement <32 x i16> %indices, i32 8
%index9 = extractelement <32 x i16> %indices, i32 9
%index10 = extractelement <32 x i16> %indices, i32 10
%index11 = extractelement <32 x i16> %indices, i32 11
%index12 = extractelement <32 x i16> %indices, i32 12
%index13 = extractelement <32 x i16> %indices, i32 13
%index14 = extractelement <32 x i16> %indices, i32 14
%index15 = extractelement <32 x i16> %indices, i32 15
%index16 = extractelement <32 x i16> %indices, i32 16
%index17 = extractelement <32 x i16> %indices, i32 17
%index18 = extractelement <32 x i16> %indices, i32 18
%index19 = extractelement <32 x i16> %indices, i32 19
%index20 = extractelement <32 x i16> %indices, i32 20
%index21 = extractelement <32 x i16> %indices, i32 21
%index22 = extractelement <32 x i16> %indices, i32 22
%index23 = extractelement <32 x i16> %indices, i32 23
%index24 = extractelement <32 x i16> %indices, i32 24
%index25 = extractelement <32 x i16> %indices, i32 25
%index26 = extractelement <32 x i16> %indices, i32 26
%index27 = extractelement <32 x i16> %indices, i32 27
%index28 = extractelement <32 x i16> %indices, i32 28
%index29 = extractelement <32 x i16> %indices, i32 29
%index30 = extractelement <32 x i16> %indices, i32 30
%index31 = extractelement <32 x i16> %indices, i32 31
%v0 = extractelement <32 x i16> %v, i16 %index0
%v1 = extractelement <32 x i16> %v, i16 %index1
%v2 = extractelement <32 x i16> %v, i16 %index2
%v3 = extractelement <32 x i16> %v, i16 %index3
%v4 = extractelement <32 x i16> %v, i16 %index4
%v5 = extractelement <32 x i16> %v, i16 %index5
%v6 = extractelement <32 x i16> %v, i16 %index6
%v7 = extractelement <32 x i16> %v, i16 %index7
%v8 = extractelement <32 x i16> %v, i16 %index8
%v9 = extractelement <32 x i16> %v, i16 %index9
%v10 = extractelement <32 x i16> %v, i16 %index10
%v11 = extractelement <32 x i16> %v, i16 %index11
%v12 = extractelement <32 x i16> %v, i16 %index12
%v13 = extractelement <32 x i16> %v, i16 %index13
%v14 = extractelement <32 x i16> %v, i16 %index14
%v15 = extractelement <32 x i16> %v, i16 %index15
%v16 = extractelement <32 x i16> %v, i16 %index16
%v17 = extractelement <32 x i16> %v, i16 %index17
%v18 = extractelement <32 x i16> %v, i16 %index18
%v19 = extractelement <32 x i16> %v, i16 %index19
%v20 = extractelement <32 x i16> %v, i16 %index20
%v21 = extractelement <32 x i16> %v, i16 %index21
%v22 = extractelement <32 x i16> %v, i16 %index22
%v23 = extractelement <32 x i16> %v, i16 %index23
%v24 = extractelement <32 x i16> %v, i16 %index24
%v25 = extractelement <32 x i16> %v, i16 %index25
%v26 = extractelement <32 x i16> %v, i16 %index26
%v27 = extractelement <32 x i16> %v, i16 %index27
%v28 = extractelement <32 x i16> %v, i16 %index28
%v29 = extractelement <32 x i16> %v, i16 %index29
%v30 = extractelement <32 x i16> %v, i16 %index30
%v31 = extractelement <32 x i16> %v, i16 %index31
%ret0 = insertelement <32 x i16> undef, i16 %v0, i32 0
%ret1 = insertelement <32 x i16> %ret0, i16 %v1, i32 1
%ret2 = insertelement <32 x i16> %ret1, i16 %v2, i32 2
%ret3 = insertelement <32 x i16> %ret2, i16 %v3, i32 3
%ret4 = insertelement <32 x i16> %ret3, i16 %v4, i32 4
%ret5 = insertelement <32 x i16> %ret4, i16 %v5, i32 5
%ret6 = insertelement <32 x i16> %ret5, i16 %v6, i32 6
%ret7 = insertelement <32 x i16> %ret6, i16 %v7, i32 7
%ret8 = insertelement <32 x i16> %ret7, i16 %v8, i32 8
%ret9 = insertelement <32 x i16> %ret8, i16 %v9, i32 9
%ret10 = insertelement <32 x i16> %ret9, i16 %v10, i32 10
%ret11 = insertelement <32 x i16> %ret10, i16 %v11, i32 11
%ret12 = insertelement <32 x i16> %ret11, i16 %v12, i32 12
%ret13 = insertelement <32 x i16> %ret12, i16 %v13, i32 13
%ret14 = insertelement <32 x i16> %ret13, i16 %v14, i32 14
%ret15 = insertelement <32 x i16> %ret14, i16 %v15, i32 15
%ret16 = insertelement <32 x i16> %ret15, i16 %v16, i32 16
%ret17 = insertelement <32 x i16> %ret16, i16 %v17, i32 17
%ret18 = insertelement <32 x i16> %ret17, i16 %v18, i32 18
%ret19 = insertelement <32 x i16> %ret18, i16 %v19, i32 19
%ret20 = insertelement <32 x i16> %ret19, i16 %v20, i32 20
%ret21 = insertelement <32 x i16> %ret20, i16 %v21, i32 21
%ret22 = insertelement <32 x i16> %ret21, i16 %v22, i32 22
%ret23 = insertelement <32 x i16> %ret22, i16 %v23, i32 23
%ret24 = insertelement <32 x i16> %ret23, i16 %v24, i32 24
%ret25 = insertelement <32 x i16> %ret24, i16 %v25, i32 25
%ret26 = insertelement <32 x i16> %ret25, i16 %v26, i32 26
%ret27 = insertelement <32 x i16> %ret26, i16 %v27, i32 27
%ret28 = insertelement <32 x i16> %ret27, i16 %v28, i32 28
%ret29 = insertelement <32 x i16> %ret28, i16 %v29, i32 29
%ret30 = insertelement <32 x i16> %ret29, i16 %v30, i32 30
%ret31 = insertelement <32 x i16> %ret30, i16 %v31, i32 31
ret <32 x i16> %ret31
}
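
; <64 x i8> needs a variable byte permute (vpermb), which requires AVX512VBMI.
; Without it the expected lowering goes through the stack and reassembles the
; result byte by byte; with VBMI a single vpermb suffices.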
define <64 x i8> @var_shuffle_v64i8(<64 x i8> %v, <64 x i8> %indices) nounwind {
; NOBW-LABEL: var_shuffle_v64i8:
; NOBW: # %bb.0:
; NOBW-NEXT: pushq %rbp
; NOBW-NEXT: movq %rsp, %rbp
; NOBW-NEXT: andq $-64, %rsp
; NOBW-NEXT: subq $4160, %rsp # imm = 0x1040
; NOBW-NEXT: vextracti64x4 $1, %zmm1, %ymm2
; NOBW-NEXT: vextracti128 $1, %ymm1, %xmm3
; NOBW-NEXT: vextracti128 $1, %ymm2, %xmm4
; NOBW-NEXT: vpextrb $0, %xmm4, %eax
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, (%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: vmovaps %ymm0, {{[0-9]+}}(%rsp)
; NOBW-NEXT: movzbl 3072(%rsp,%rax), %eax
; NOBW-NEXT: vmovd %eax, %xmm0
; NOBW-NEXT: vpextrb $1, %xmm4, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $1, 3136(%rsp,%rax), %xmm0, %xmm0
; NOBW-NEXT: vpextrb $2, %xmm4, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $2, 3200(%rsp,%rax), %xmm0, %xmm0
; NOBW-NEXT: vpextrb $3, %xmm4, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $3, 3264(%rsp,%rax), %xmm0, %xmm0
; NOBW-NEXT: vpextrb $4, %xmm4, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $4, 3328(%rsp,%rax), %xmm0, %xmm0
; NOBW-NEXT: vpextrb $5, %xmm4, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $5, 3392(%rsp,%rax), %xmm0, %xmm0
; NOBW-NEXT: vpextrb $6, %xmm4, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $6, 3456(%rsp,%rax), %xmm0, %xmm0
; NOBW-NEXT: vpextrb $7, %xmm4, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $7, 3520(%rsp,%rax), %xmm0, %xmm0
; NOBW-NEXT: vpextrb $8, %xmm4, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $8, 3584(%rsp,%rax), %xmm0, %xmm0
; NOBW-NEXT: vpextrb $9, %xmm4, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $9, 3648(%rsp,%rax), %xmm0, %xmm0
; NOBW-NEXT: vpextrb $10, %xmm4, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $10, 3712(%rsp,%rax), %xmm0, %xmm0
; NOBW-NEXT: vpextrb $11, %xmm4, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $11, 3776(%rsp,%rax), %xmm0, %xmm0
; NOBW-NEXT: vpextrb $12, %xmm4, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $12, 3840(%rsp,%rax), %xmm0, %xmm0
; NOBW-NEXT: vpextrb $13, %xmm4, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $13, 3904(%rsp,%rax), %xmm0, %xmm0
; NOBW-NEXT: vpextrb $14, %xmm4, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $14, 3968(%rsp,%rax), %xmm0, %xmm0
; NOBW-NEXT: vpextrb $15, %xmm4, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $15, 4032(%rsp,%rax), %xmm0, %xmm0
; NOBW-NEXT: vpextrb $0, %xmm2, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: movzbl 2048(%rsp,%rax), %eax
; NOBW-NEXT: vmovd %eax, %xmm4
; NOBW-NEXT: vpextrb $1, %xmm2, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $1, 2112(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $2, %xmm2, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $2, 2176(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $3, %xmm2, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $3, 2240(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $4, %xmm2, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $4, 2304(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $5, %xmm2, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $5, 2368(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $6, %xmm2, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $6, 2432(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $7, %xmm2, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $7, 2496(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $8, %xmm2, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $8, 2560(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $9, %xmm2, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $9, 2624(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $10, %xmm2, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $10, 2688(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $11, %xmm2, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $11, 2752(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $12, %xmm2, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $12, 2816(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $13, %xmm2, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $13, 2880(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $14, %xmm2, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $14, 2944(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $15, %xmm2, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $15, 3008(%rsp,%rax), %xmm4, %xmm2
; NOBW-NEXT: vpextrb $0, %xmm3, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: movzbl 1024(%rsp,%rax), %eax
; NOBW-NEXT: vmovd %eax, %xmm4
; NOBW-NEXT: vpextrb $1, %xmm3, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $1, 1088(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $2, %xmm3, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $2, 1152(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $3, %xmm3, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $3, 1216(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $4, %xmm3, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $4, 1280(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $5, %xmm3, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $5, 1344(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $6, %xmm3, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $6, 1408(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $7, %xmm3, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $7, 1472(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $8, %xmm3, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $8, 1536(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $9, %xmm3, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $9, 1600(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $10, %xmm3, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $10, 1664(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $11, %xmm3, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $11, 1728(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $12, %xmm3, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $12, 1792(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $13, %xmm3, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $13, 1856(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $14, %xmm3, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $14, 1920(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $15, %xmm3, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $15, 1984(%rsp,%rax), %xmm4, %xmm3
; NOBW-NEXT: vpextrb $0, %xmm1, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: movzbl (%rsp,%rax), %eax
; NOBW-NEXT: vmovd %eax, %xmm4
; NOBW-NEXT: vpextrb $1, %xmm1, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $1, 64(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $2, %xmm1, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $2, 128(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $3, %xmm1, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $3, 192(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $4, %xmm1, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $4, 256(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $5, %xmm1, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $5, 320(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $6, %xmm1, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $6, 384(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $7, %xmm1, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $7, 448(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $8, %xmm1, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $8, 512(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $9, %xmm1, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $9, 576(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $10, %xmm1, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $10, 640(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $11, %xmm1, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $11, 704(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $12, %xmm1, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $12, 768(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $13, %xmm1, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $13, 832(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $14, %xmm1, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $14, 896(%rsp,%rax), %xmm4, %xmm4
; NOBW-NEXT: vpextrb $15, %xmm1, %eax
; NOBW-NEXT: andl $63, %eax
; NOBW-NEXT: vpinsrb $15, 960(%rsp,%rax), %xmm4, %xmm1
; NOBW-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
; NOBW-NEXT: vinserti128 $1, %xmm3, %ymm1, %ymm1
; NOBW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
; NOBW-NEXT: movq %rbp, %rsp
; NOBW-NEXT: popq %rbp
; NOBW-NEXT: retq
;
; VBMI-LABEL: var_shuffle_v64i8:
; VBMI: # %bb.0:
; VBMI-NEXT: vpermb %zmm0, %zmm1, %zmm0
; VBMI-NEXT: retq
%index0 = extractelement <64 x i8> %indices, i32 0
%index1 = extractelement <64 x i8> %indices, i32 1
%index2 = extractelement <64 x i8> %indices, i32 2
%index3 = extractelement <64 x i8> %indices, i32 3
%index4 = extractelement <64 x i8> %indices, i32 4
%index5 = extractelement <64 x i8> %indices, i32 5
%index6 = extractelement <64 x i8> %indices, i32 6
%index7 = extractelement <64 x i8> %indices, i32 7
%index8 = extractelement <64 x i8> %indices, i32 8
%index9 = extractelement <64 x i8> %indices, i32 9
%index10 = extractelement <64 x i8> %indices, i32 10
%index11 = extractelement <64 x i8> %indices, i32 11
%index12 = extractelement <64 x i8> %indices, i32 12
%index13 = extractelement <64 x i8> %indices, i32 13
%index14 = extractelement <64 x i8> %indices, i32 14
%index15 = extractelement <64 x i8> %indices, i32 15
%index16 = extractelement <64 x i8> %indices, i32 16
%index17 = extractelement <64 x i8> %indices, i32 17
%index18 = extractelement <64 x i8> %indices, i32 18
%index19 = extractelement <64 x i8> %indices, i32 19
%index20 = extractelement <64 x i8> %indices, i32 20
%index21 = extractelement <64 x i8> %indices, i32 21
%index22 = extractelement <64 x i8> %indices, i32 22
%index23 = extractelement <64 x i8> %indices, i32 23
%index24 = extractelement <64 x i8> %indices, i32 24
%index25 = extractelement <64 x i8> %indices, i32 25
%index26 = extractelement <64 x i8> %indices, i32 26
%index27 = extractelement <64 x i8> %indices, i32 27
%index28 = extractelement <64 x i8> %indices, i32 28
%index29 = extractelement <64 x i8> %indices, i32 29
%index30 = extractelement <64 x i8> %indices, i32 30
%index31 = extractelement <64 x i8> %indices, i32 31
%index32 = extractelement <64 x i8> %indices, i32 32
%index33 = extractelement <64 x i8> %indices, i32 33
%index34 = extractelement <64 x i8> %indices, i32 34
%index35 = extractelement <64 x i8> %indices, i32 35
%index36 = extractelement <64 x i8> %indices, i32 36
%index37 = extractelement <64 x i8> %indices, i32 37
%index38 = extractelement <64 x i8> %indices, i32 38
%index39 = extractelement <64 x i8> %indices, i32 39
%index40 = extractelement <64 x i8> %indices, i32 40
%index41 = extractelement <64 x i8> %indices, i32 41
%index42 = extractelement <64 x i8> %indices, i32 42
%index43 = extractelement <64 x i8> %indices, i32 43
%index44 = extractelement <64 x i8> %indices, i32 44
%index45 = extractelement <64 x i8> %indices, i32 45
%index46 = extractelement <64 x i8> %indices, i32 46
%index47 = extractelement <64 x i8> %indices, i32 47
%index48 = extractelement <64 x i8> %indices, i32 48
%index49 = extractelement <64 x i8> %indices, i32 49
%index50 = extractelement <64 x i8> %indices, i32 50
%index51 = extractelement <64 x i8> %indices, i32 51
%index52 = extractelement <64 x i8> %indices, i32 52
%index53 = extractelement <64 x i8> %indices, i32 53
%index54 = extractelement <64 x i8> %indices, i32 54
%index55 = extractelement <64 x i8> %indices, i32 55
%index56 = extractelement <64 x i8> %indices, i32 56
%index57 = extractelement <64 x i8> %indices, i32 57
%index58 = extractelement <64 x i8> %indices, i32 58
%index59 = extractelement <64 x i8> %indices, i32 59
%index60 = extractelement <64 x i8> %indices, i32 60
%index61 = extractelement <64 x i8> %indices, i32 61
%index62 = extractelement <64 x i8> %indices, i32 62
%index63 = extractelement <64 x i8> %indices, i32 63
%v0 = extractelement <64 x i8> %v, i8 %index0
%v1 = extractelement <64 x i8> %v, i8 %index1
%v2 = extractelement <64 x i8> %v, i8 %index2
%v3 = extractelement <64 x i8> %v, i8 %index3
%v4 = extractelement <64 x i8> %v, i8 %index4
%v5 = extractelement <64 x i8> %v, i8 %index5
%v6 = extractelement <64 x i8> %v, i8 %index6
%v7 = extractelement <64 x i8> %v, i8 %index7
%v8 = extractelement <64 x i8> %v, i8 %index8
%v9 = extractelement <64 x i8> %v, i8 %index9
%v10 = extractelement <64 x i8> %v, i8 %index10
%v11 = extractelement <64 x i8> %v, i8 %index11
%v12 = extractelement <64 x i8> %v, i8 %index12
%v13 = extractelement <64 x i8> %v, i8 %index13
%v14 = extractelement <64 x i8> %v, i8 %index14
%v15 = extractelement <64 x i8> %v, i8 %index15
%v16 = extractelement <64 x i8> %v, i8 %index16
%v17 = extractelement <64 x i8> %v, i8 %index17
%v18 = extractelement <64 x i8> %v, i8 %index18
%v19 = extractelement <64 x i8> %v, i8 %index19
%v20 = extractelement <64 x i8> %v, i8 %index20
%v21 = extractelement <64 x i8> %v, i8 %index21
%v22 = extractelement <64 x i8> %v, i8 %index22
%v23 = extractelement <64 x i8> %v, i8 %index23
%v24 = extractelement <64 x i8> %v, i8 %index24
%v25 = extractelement <64 x i8> %v, i8 %index25
%v26 = extractelement <64 x i8> %v, i8 %index26
%v27 = extractelement <64 x i8> %v, i8 %index27
%v28 = extractelement <64 x i8> %v, i8 %index28
%v29 = extractelement <64 x i8> %v, i8 %index29
%v30 = extractelement <64 x i8> %v, i8 %index30
%v31 = extractelement <64 x i8> %v, i8 %index31
%v32 = extractelement <64 x i8> %v, i8 %index32
%v33 = extractelement <64 x i8> %v, i8 %index33
%v34 = extractelement <64 x i8> %v, i8 %index34
%v35 = extractelement <64 x i8> %v, i8 %index35
%v36 = extractelement <64 x i8> %v, i8 %index36
%v37 = extractelement <64 x i8> %v, i8 %index37
%v38 = extractelement <64 x i8> %v, i8 %index38
%v39 = extractelement <64 x i8> %v, i8 %index39
%v40 = extractelement <64 x i8> %v, i8 %index40
%v41 = extractelement <64 x i8> %v, i8 %index41
%v42 = extractelement <64 x i8> %v, i8 %index42
%v43 = extractelement <64 x i8> %v, i8 %index43
%v44 = extractelement <64 x i8> %v, i8 %index44
%v45 = extractelement <64 x i8> %v, i8 %index45
%v46 = extractelement <64 x i8> %v, i8 %index46
%v47 = extractelement <64 x i8> %v, i8 %index47
%v48 = extractelement <64 x i8> %v, i8 %index48
%v49 = extractelement <64 x i8> %v, i8 %index49
%v50 = extractelement <64 x i8> %v, i8 %index50
%v51 = extractelement <64 x i8> %v, i8 %index51
%v52 = extractelement <64 x i8> %v, i8 %index52
%v53 = extractelement <64 x i8> %v, i8 %index53
%v54 = extractelement <64 x i8> %v, i8 %index54
%v55 = extractelement <64 x i8> %v, i8 %index55
%v56 = extractelement <64 x i8> %v, i8 %index56
%v57 = extractelement <64 x i8> %v, i8 %index57
%v58 = extractelement <64 x i8> %v, i8 %index58
%v59 = extractelement <64 x i8> %v, i8 %index59
%v60 = extractelement <64 x i8> %v, i8 %index60
%v61 = extractelement <64 x i8> %v, i8 %index61
%v62 = extractelement <64 x i8> %v, i8 %index62
%v63 = extractelement <64 x i8> %v, i8 %index63
%ret0 = insertelement <64 x i8> undef, i8 %v0, i32 0
%ret1 = insertelement <64 x i8> %ret0, i8 %v1, i32 1
%ret2 = insertelement <64 x i8> %ret1, i8 %v2, i32 2
%ret3 = insertelement <64 x i8> %ret2, i8 %v3, i32 3
%ret4 = insertelement <64 x i8> %ret3, i8 %v4, i32 4
%ret5 = insertelement <64 x i8> %ret4, i8 %v5, i32 5
%ret6 = insertelement <64 x i8> %ret5, i8 %v6, i32 6
%ret7 = insertelement <64 x i8> %ret6, i8 %v7, i32 7
%ret8 = insertelement <64 x i8> %ret7, i8 %v8, i32 8
%ret9 = insertelement <64 x i8> %ret8, i8 %v9, i32 9
%ret10 = insertelement <64 x i8> %ret9, i8 %v10, i32 10
%ret11 = insertelement <64 x i8> %ret10, i8 %v11, i32 11
%ret12 = insertelement <64 x i8> %ret11, i8 %v12, i32 12
%ret13 = insertelement <64 x i8> %ret12, i8 %v13, i32 13
%ret14 = insertelement <64 x i8> %ret13, i8 %v14, i32 14
%ret15 = insertelement <64 x i8> %ret14, i8 %v15, i32 15
%ret16 = insertelement <64 x i8> %ret15, i8 %v16, i32 16
%ret17 = insertelement <64 x i8> %ret16, i8 %v17, i32 17
%ret18 = insertelement <64 x i8> %ret17, i8 %v18, i32 18
%ret19 = insertelement <64 x i8> %ret18, i8 %v19, i32 19
%ret20 = insertelement <64 x i8> %ret19, i8 %v20, i32 20
%ret21 = insertelement <64 x i8> %ret20, i8 %v21, i32 21
%ret22 = insertelement <64 x i8> %ret21, i8 %v22, i32 22
%ret23 = insertelement <64 x i8> %ret22, i8 %v23, i32 23
%ret24 = insertelement <64 x i8> %ret23, i8 %v24, i32 24
%ret25 = insertelement <64 x i8> %ret24, i8 %v25, i32 25
%ret26 = insertelement <64 x i8> %ret25, i8 %v26, i32 26
%ret27 = insertelement <64 x i8> %ret26, i8 %v27, i32 27
%ret28 = insertelement <64 x i8> %ret27, i8 %v28, i32 28
%ret29 = insertelement <64 x i8> %ret28, i8 %v29, i32 29
%ret30 = insertelement <64 x i8> %ret29, i8 %v30, i32 30
%ret31 = insertelement <64 x i8> %ret30, i8 %v31, i32 31
%ret32 = insertelement <64 x i8> %ret31, i8 %v32, i32 32
%ret33 = insertelement <64 x i8> %ret32, i8 %v33, i32 33
%ret34 = insertelement <64 x i8> %ret33, i8 %v34, i32 34
%ret35 = insertelement <64 x i8> %ret34, i8 %v35, i32 35
%ret36 = insertelement <64 x i8> %ret35, i8 %v36, i32 36
%ret37 = insertelement <64 x i8> %ret36, i8 %v37, i32 37
%ret38 = insertelement <64 x i8> %ret37, i8 %v38, i32 38
%ret39 = insertelement <64 x i8> %ret38, i8 %v39, i32 39
%ret40 = insertelement <64 x i8> %ret39, i8 %v40, i32 40
%ret41 = insertelement <64 x i8> %ret40, i8 %v41, i32 41
%ret42 = insertelement <64 x i8> %ret41, i8 %v42, i32 42
%ret43 = insertelement <64 x i8> %ret42, i8 %v43, i32 43
%ret44 = insertelement <64 x i8> %ret43, i8 %v44, i32 44
%ret45 = insertelement <64 x i8> %ret44, i8 %v45, i32 45
%ret46 = insertelement <64 x i8> %ret45, i8 %v46, i32 46
%ret47 = insertelement <64 x i8> %ret46, i8 %v47, i32 47
%ret48 = insertelement <64 x i8> %ret47, i8 %v48, i32 48
%ret49 = insertelement <64 x i8> %ret48, i8 %v49, i32 49
%ret50 = insertelement <64 x i8> %ret49, i8 %v50, i32 50
%ret51 = insertelement <64 x i8> %ret50, i8 %v51, i32 51
%ret52 = insertelement <64 x i8> %ret51, i8 %v52, i32 52
%ret53 = insertelement <64 x i8> %ret52, i8 %v53, i32 53
%ret54 = insertelement <64 x i8> %ret53, i8 %v54, i32 54
%ret55 = insertelement <64 x i8> %ret54, i8 %v55, i32 55
%ret56 = insertelement <64 x i8> %ret55, i8 %v56, i32 56
%ret57 = insertelement <64 x i8> %ret56, i8 %v57, i32 57
%ret58 = insertelement <64 x i8> %ret57, i8 %v58, i32 58
%ret59 = insertelement <64 x i8> %ret58, i8 %v59, i32 59
%ret60 = insertelement <64 x i8> %ret59, i8 %v60, i32 60
%ret61 = insertelement <64 x i8> %ret60, i8 %v61, i32 61
%ret62 = insertelement <64 x i8> %ret61, i8 %v62, i32 62
%ret63 = insertelement <64 x i8> %ret62, i8 %v63, i32 63
ret <64 x i8> %ret63
}
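
; Floating-point variant of var_shuffle_v8i64: <8 x double> with i64 indices,
; expected to lower to a single vpermpd.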
define <8 x double> @var_shuffle_v8f64(<8 x double> %v, <8 x i64> %indices) nounwind {
; AVX512-LABEL: var_shuffle_v8f64:
; AVX512: # %bb.0:
; AVX512-NEXT: vpermpd %zmm0, %zmm1, %zmm0
; AVX512-NEXT: retq
%index0 = extractelement <8 x i64> %indices, i32 0
%index1 = extractelement <8 x i64> %indices, i32 1
%index2 = extractelement <8 x i64> %indices, i32 2
%index3 = extractelement <8 x i64> %indices, i32 3
%index4 = extractelement <8 x i64> %indices, i32 4
%index5 = extractelement <8 x i64> %indices, i32 5
%index6 = extractelement <8 x i64> %indices, i32 6
%index7 = extractelement <8 x i64> %indices, i32 7
%v0 = extractelement <8 x double> %v, i64 %index0
%v1 = extractelement <8 x double> %v, i64 %index1
%v2 = extractelement <8 x double> %v, i64 %index2
%v3 = extractelement <8 x double> %v, i64 %index3
%v4 = extractelement <8 x double> %v, i64 %index4
%v5 = extractelement <8 x double> %v, i64 %index5
%v6 = extractelement <8 x double> %v, i64 %index6
%v7 = extractelement <8 x double> %v, i64 %index7
%ret0 = insertelement <8 x double> undef, double %v0, i32 0
%ret1 = insertelement <8 x double> %ret0, double %v1, i32 1
%ret2 = insertelement <8 x double> %ret1, double %v2, i32 2
%ret3 = insertelement <8 x double> %ret2, double %v3, i32 3
%ret4 = insertelement <8 x double> %ret3, double %v4, i32 4
%ret5 = insertelement <8 x double> %ret4, double %v5, i32 5
%ret6 = insertelement <8 x double> %ret5, double %v6, i32 6
%ret7 = insertelement <8 x double> %ret6, double %v7, i32 7
ret <8 x double> %ret7
}
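
; Floating-point variant of var_shuffle_v16i32: <16 x float> with i32 indices,
; expected to lower to a single vpermps.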
define <16 x float> @var_shuffle_v16f32(<16 x float> %v, <16 x i32> %indices) nounwind {
; AVX512-LABEL: var_shuffle_v16f32:
; AVX512: # %bb.0:
; AVX512-NEXT: vpermps %zmm0, %zmm1, %zmm0
; AVX512-NEXT: retq
%index0 = extractelement <16 x i32> %indices, i32 0
%index1 = extractelement <16 x i32> %indices, i32 1
%index2 = extractelement <16 x i32> %indices, i32 2
%index3 = extractelement <16 x i32> %indices, i32 3
%index4 = extractelement <16 x i32> %indices, i32 4
%index5 = extractelement <16 x i32> %indices, i32 5
%index6 = extractelement <16 x i32> %indices, i32 6
%index7 = extractelement <16 x i32> %indices, i32 7
%index8 = extractelement <16 x i32> %indices, i32 8
%index9 = extractelement <16 x i32> %indices, i32 9
%index10 = extractelement <16 x i32> %indices, i32 10
%index11 = extractelement <16 x i32> %indices, i32 11
%index12 = extractelement <16 x i32> %indices, i32 12
%index13 = extractelement <16 x i32> %indices, i32 13
%index14 = extractelement <16 x i32> %indices, i32 14
%index15 = extractelement <16 x i32> %indices, i32 15
%v0 = extractelement <16 x float> %v, i32 %index0
%v1 = extractelement <16 x float> %v, i32 %index1
%v2 = extractelement <16 x float> %v, i32 %index2
%v3 = extractelement <16 x float> %v, i32 %index3
%v4 = extractelement <16 x float> %v, i32 %index4
%v5 = extractelement <16 x float> %v, i32 %index5
%v6 = extractelement <16 x float> %v, i32 %index6
%v7 = extractelement <16 x float> %v, i32 %index7
%v8 = extractelement <16 x float> %v, i32 %index8
%v9 = extractelement <16 x float> %v, i32 %index9
%v10 = extractelement <16 x float> %v, i32 %index10
%v11 = extractelement <16 x float> %v, i32 %index11
%v12 = extractelement <16 x float> %v, i32 %index12
%v13 = extractelement <16 x float> %v, i32 %index13
%v14 = extractelement <16 x float> %v, i32 %index14
%v15 = extractelement <16 x float> %v, i32 %index15
%ret0 = insertelement <16 x float> undef, float %v0, i32 0
%ret1 = insertelement <16 x float> %ret0, float %v1, i32 1
%ret2 = insertelement <16 x float> %ret1, float %v2, i32 2
%ret3 = insertelement <16 x float> %ret2, float %v3, i32 3
%ret4 = insertelement <16 x float> %ret3, float %v4, i32 4
%ret5 = insertelement <16 x float> %ret4, float %v5, i32 5
%ret6 = insertelement <16 x float> %ret5, float %v6, i32 6
%ret7 = insertelement <16 x float> %ret6, float %v7, i32 7
%ret8 = insertelement <16 x float> %ret7, float %v8, i32 8
%ret9 = insertelement <16 x float> %ret8, float %v9, i32 9
%ret10 = insertelement <16 x float> %ret9, float %v10, i32 10
%ret11 = insertelement <16 x float> %ret10, float %v11, i32 11
%ret12 = insertelement <16 x float> %ret11, float %v12, i32 12
%ret13 = insertelement <16 x float> %ret12, float %v13, i32 13
%ret14 = insertelement <16 x float> %ret13, float %v14, i32 14
%ret15 = insertelement <16 x float> %ret14, float %v15, i32 15
ret <16 x float> %ret15
}