@@ -73,12 +73,18 @@ TLI_DEFINE_VECFUNC("llvm.exp.f32", "_simd_exp_f4", FIXED(4), "_ZGV_LLVM_N4v")
 
 // Trigonometric Functions
 TLI_DEFINE_VECFUNC("acos", "_simd_acos_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.acos.f64", "_simd_acos_d2", FIXED(2), "_ZGV_LLVM_N2v")
 TLI_DEFINE_VECFUNC("acosf", "_simd_acos_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.acos.f32", "_simd_acos_f4", FIXED(4), "_ZGV_LLVM_N4v")
 TLI_DEFINE_VECFUNC("asin", "_simd_asin_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.asin.f64", "_simd_asin_d2", FIXED(2), "_ZGV_LLVM_N2v")
 TLI_DEFINE_VECFUNC("asinf", "_simd_asin_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.asin.f32", "_simd_asin_f4", FIXED(4), "_ZGV_LLVM_N4v")
 
 TLI_DEFINE_VECFUNC("atan", "_simd_atan_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.atan.f64", "_simd_atan_d2", FIXED(2), "_ZGV_LLVM_N2v")
 TLI_DEFINE_VECFUNC("atanf", "_simd_atan_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.atan.f32", "_simd_atan_f4", FIXED(4), "_ZGV_LLVM_N4v")
 TLI_DEFINE_VECFUNC("atan2", "_simd_atan2_d2", FIXED(2), "_ZGV_LLVM_N2vv")
 TLI_DEFINE_VECFUNC("atan2f", "_simd_atan2_f4", FIXED(4), "_ZGV_LLVM_N4vv")
 
@@ -109,11 +115,17 @@ TLI_DEFINE_VECFUNC("llvm.pow.f32", "_simd_pow_f4", FIXED(4), "_ZGV_LLVM_N4vv")
 
 // Hyperbolic Functions
 TLI_DEFINE_VECFUNC("sinh", "_simd_sinh_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.sinh.f64", "_simd_sinh_d2", FIXED(2), "_ZGV_LLVM_N2v")
 TLI_DEFINE_VECFUNC("sinhf", "_simd_sinh_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.sinh.f32", "_simd_sinh_f4", FIXED(4), "_ZGV_LLVM_N4v")
 TLI_DEFINE_VECFUNC("cosh", "_simd_cosh_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.cosh.f64", "_simd_cosh_d2", FIXED(2), "_ZGV_LLVM_N2v")
 TLI_DEFINE_VECFUNC("coshf", "_simd_cosh_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.cosh.f32", "_simd_cosh_f4", FIXED(4), "_ZGV_LLVM_N4v")
 TLI_DEFINE_VECFUNC("tanh", "_simd_tanh_d2", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.tanh.f64", "_simd_tanh_d2", FIXED(2), "_ZGV_LLVM_N2v")
 TLI_DEFINE_VECFUNC("tanhf", "_simd_tanh_f4", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.tanh.f32", "_simd_tanh_f4", FIXED(4), "_ZGV_LLVM_N4v")
 TLI_DEFINE_VECFUNC("asinh", "_simd_asinh_d2", FIXED(2), "_ZGV_LLVM_N2v")
 TLI_DEFINE_VECFUNC("asinhf", "_simd_asinh_f4", FIXED(4), "_ZGV_LLVM_N4v")
 TLI_DEFINE_VECFUNC("acosh", "_simd_acosh_d2", FIXED(2), "_ZGV_LLVM_N2v")
@@ -500,14 +512,17 @@ TLI_DEFINE_VECFUNC("__exp2f_finite", "__svml_exp2f16", FIXED(16), "_ZGV_LLVM_N16
 #elif defined(TLI_DEFINE_SLEEFGNUABI_VF2_VECFUNCS)
 
 TLI_DEFINE_VECFUNC("acos", "_ZGVnN2v_acos", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.acos.f64", "_ZGVnN2v_acos", FIXED(2), "_ZGV_LLVM_N2v")
 
 TLI_DEFINE_VECFUNC("acosh", "_ZGVnN2v_acosh", FIXED(2), "_ZGV_LLVM_N2v")
 
 TLI_DEFINE_VECFUNC("asin", "_ZGVnN2v_asin", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.asin.f64", "_ZGVnN2v_asin", FIXED(2), "_ZGV_LLVM_N2v")
 
 TLI_DEFINE_VECFUNC("asinh", "_ZGVnN2v_asinh", FIXED(2), "_ZGV_LLVM_N2v")
 
 TLI_DEFINE_VECFUNC("atan", "_ZGVnN2v_atan", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.atan.f64", "_ZGVnN2v_atan", FIXED(2), "_ZGV_LLVM_N2v")
 
 TLI_DEFINE_VECFUNC("atan2", "_ZGVnN2vv_atan2", FIXED(2), "_ZGV_LLVM_N2vv")
 
@@ -521,6 +536,7 @@ TLI_DEFINE_VECFUNC("cos", "_ZGVnN2v_cos", FIXED(2), "_ZGV_LLVM_N2v")
 TLI_DEFINE_VECFUNC("llvm.cos.f64", "_ZGVnN2v_cos", FIXED(2), "_ZGV_LLVM_N2v")
 
 TLI_DEFINE_VECFUNC("cosh", "_ZGVnN2v_cosh", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.cosh.f64", "_ZGVnN2v_cosh", FIXED(2), "_ZGV_LLVM_N2v")
 
 TLI_DEFINE_VECFUNC("cospi", "_ZGVnN2v_cospi", FIXED(2), "_ZGV_LLVM_N2v")
 
@@ -583,6 +599,7 @@ TLI_DEFINE_VECFUNC("sincos", "_ZGVnN2vl8l8_sincos", FIXED(2), "_ZGV_LLVM_N2vl8l8
 TLI_DEFINE_VECFUNC("sincospi", "_ZGVnN2vl8l8_sincospi", FIXED(2), "_ZGV_LLVM_N2vl8l8")
 
 TLI_DEFINE_VECFUNC("sinh", "_ZGVnN2v_sinh", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.sinh.f64", "_ZGVnN2v_sinh", FIXED(2), "_ZGV_LLVM_N2v")
 
 TLI_DEFINE_VECFUNC("sinpi", "_ZGVnN2v_sinpi", FIXED(2), "_ZGV_LLVM_N2v")
 
@@ -592,20 +609,24 @@ TLI_DEFINE_VECFUNC("tan", "_ZGVnN2v_tan", FIXED(2), "_ZGV_LLVM_N2v")
 TLI_DEFINE_VECFUNC("llvm.tan.f64", "_ZGVnN2v_tan", FIXED(2), "_ZGV_LLVM_N2v")
 
 TLI_DEFINE_VECFUNC("tanh", "_ZGVnN2v_tanh", FIXED(2), "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.tanh.f64", "_ZGVnN2v_tanh", FIXED(2), "_ZGV_LLVM_N2v")
 
 TLI_DEFINE_VECFUNC("tgamma", "_ZGVnN2v_tgamma", FIXED(2), "_ZGV_LLVM_N2v")
 
 #elif defined(TLI_DEFINE_SLEEFGNUABI_VF4_VECFUNCS)
 
 TLI_DEFINE_VECFUNC("acosf", "_ZGVnN4v_acosf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.acos.f32", "_ZGVnN4v_acosf", FIXED(4), "_ZGV_LLVM_N4v")
 
 TLI_DEFINE_VECFUNC("acoshf", "_ZGVnN4v_acoshf", FIXED(4), "_ZGV_LLVM_N4v")
 
 TLI_DEFINE_VECFUNC("asinf", "_ZGVnN4v_asinf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.asin.f32", "_ZGVnN4v_asinf", FIXED(4), "_ZGV_LLVM_N4v")
 
 TLI_DEFINE_VECFUNC("asinhf", "_ZGVnN4v_asinhf", FIXED(4), "_ZGV_LLVM_N4v")
 
 TLI_DEFINE_VECFUNC("atanf", "_ZGVnN4v_atanf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.atan.f32", "_ZGVnN4v_atanf", FIXED(4), "_ZGV_LLVM_N4v")
 
 TLI_DEFINE_VECFUNC("atan2f", "_ZGVnN4vv_atan2f", FIXED(4), "_ZGV_LLVM_N4vv")
 
@@ -619,6 +640,7 @@ TLI_DEFINE_VECFUNC("cosf", "_ZGVnN4v_cosf", FIXED(4), "_ZGV_LLVM_N4v")
 TLI_DEFINE_VECFUNC("llvm.cos.f32", "_ZGVnN4v_cosf", FIXED(4), "_ZGV_LLVM_N4v")
 
 TLI_DEFINE_VECFUNC("coshf", "_ZGVnN4v_coshf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.cosh.f32", "_ZGVnN4v_coshf", FIXED(4), "_ZGV_LLVM_N4v")
 
 TLI_DEFINE_VECFUNC("cospif", "_ZGVnN4v_cospif", FIXED(4), "_ZGV_LLVM_N4v")
 
@@ -681,6 +703,7 @@ TLI_DEFINE_VECFUNC("sincosf", "_ZGVnN4vl4l4_sincosf", FIXED(4), "_ZGV_LLVM_N4vl4
 TLI_DEFINE_VECFUNC("sincospif", "_ZGVnN4vl4l4_sincospif", FIXED(4), "_ZGV_LLVM_N4vl4l4")
 
 TLI_DEFINE_VECFUNC("sinhf", "_ZGVnN4v_sinhf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.sinh.f32", "_ZGVnN4v_sinhf", FIXED(4), "_ZGV_LLVM_N4v")
 
 TLI_DEFINE_VECFUNC("sinpif", "_ZGVnN4v_sinpif", FIXED(4), "_ZGV_LLVM_N4v")
 
@@ -690,25 +713,32 @@ TLI_DEFINE_VECFUNC("tanf", "_ZGVnN4v_tanf", FIXED(4), "_ZGV_LLVM_N4v")
 TLI_DEFINE_VECFUNC("llvm.tan.f32", "_ZGVnN4v_tanf", FIXED(4), "_ZGV_LLVM_N4v")
 
 TLI_DEFINE_VECFUNC("tanhf", "_ZGVnN4v_tanhf", FIXED(4), "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.tanh.f32", "_ZGVnN4v_tanhf", FIXED(4), "_ZGV_LLVM_N4v")
 
 TLI_DEFINE_VECFUNC("tgammaf", "_ZGVnN4v_tgammaf", FIXED(4), "_ZGV_LLVM_N4v")
 
 #elif defined(TLI_DEFINE_SLEEFGNUABI_SCALABLE_VECFUNCS)
 
 TLI_DEFINE_VECFUNC("acos", "_ZGVsMxv_acos", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("acosf", "_ZGVsMxv_acosf", SCALABLE(4), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.acos.f64", "_ZGVsMxv_acos", SCALABLE(2), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.acos.f32", "_ZGVsMxv_acosf", SCALABLE(4), MASKED, "_ZGVsMxv")
 
 TLI_DEFINE_VECFUNC("acosh", "_ZGVsMxv_acosh", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("acoshf", "_ZGVsMxv_acoshf", SCALABLE(4), MASKED, "_ZGVsMxv")
 
 TLI_DEFINE_VECFUNC("asin", "_ZGVsMxv_asin", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("asinf", "_ZGVsMxv_asinf", SCALABLE(4), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.asin.f64", "_ZGVsMxv_asin", SCALABLE(2), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.asin.f32", "_ZGVsMxv_asinf", SCALABLE(4), MASKED, "_ZGVsMxv")
 
 TLI_DEFINE_VECFUNC("asinh", "_ZGVsMxv_asinh", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("asinhf", "_ZGVsMxv_asinhf", SCALABLE(4), MASKED, "_ZGVsMxv")
 
 TLI_DEFINE_VECFUNC("atan", "_ZGVsMxv_atan", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("atanf", "_ZGVsMxv_atanf", SCALABLE(4), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.atan.f64", "_ZGVsMxv_atan", SCALABLE(2), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.atan.f32", "_ZGVsMxv_atanf", SCALABLE(4), MASKED, "_ZGVsMxv")
 
 TLI_DEFINE_VECFUNC("atan2", "_ZGVsMxvv_atan2", SCALABLE(2), MASKED, "_ZGVsMxvv")
 TLI_DEFINE_VECFUNC("atan2f", "_ZGVsMxvv_atan2f", SCALABLE(4), MASKED, "_ZGVsMxvv")
@@ -729,6 +759,8 @@ TLI_DEFINE_VECFUNC("llvm.cos.f32", "_ZGVsMxv_cosf", SCALABLE(4), MASKED, "_ZGVsM
 
 TLI_DEFINE_VECFUNC("cosh", "_ZGVsMxv_cosh", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("coshf", "_ZGVsMxv_coshf", SCALABLE(4), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.cosh.f64", "_ZGVsMxv_cosh", SCALABLE(2), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.cosh.f32", "_ZGVsMxv_coshf", SCALABLE(4), MASKED, "_ZGVsMxv")
 
 TLI_DEFINE_VECFUNC("cospi", "_ZGVsMxv_cospi", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("cospif", "_ZGVsMxv_cospif", SCALABLE(4), MASKED, "_ZGVsMxv")
@@ -826,6 +858,8 @@ TLI_DEFINE_VECFUNC("sincospif", "_ZGVsNxvl4l4_sincospif", SCALABLE(4), NOMASK, "
 
 TLI_DEFINE_VECFUNC("sinh", "_ZGVsMxv_sinh", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("sinhf", "_ZGVsMxv_sinhf", SCALABLE(4), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.sinh.f64", "_ZGVsMxv_sinh", SCALABLE(2), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.sinh.f32", "_ZGVsMxv_sinhf", SCALABLE(4), MASKED, "_ZGVsMxv")
 
 TLI_DEFINE_VECFUNC("sinpi", "_ZGVsMxv_sinpi", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("sinpif", "_ZGVsMxv_sinpif", SCALABLE(4), MASKED, "_ZGVsMxv")
@@ -840,6 +874,8 @@ TLI_DEFINE_VECFUNC("llvm.tan.f32", "_ZGVsMxv_tanf", SCALABLE(4), MASKED, "_ZGVsM
 
 TLI_DEFINE_VECFUNC("tanh", "_ZGVsMxv_tanh", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("tanhf", "_ZGVsMxv_tanhf", SCALABLE(4), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.tanh.f64", "_ZGVsMxv_tanh", SCALABLE(2), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.tanh.f32", "_ZGVsMxv_tanhf", SCALABLE(4), MASKED, "_ZGVsMxv")
 
 TLI_DEFINE_VECFUNC("tgamma", "_ZGVsMxv_tgamma", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("tgammaf", "_ZGVsMxv_tgammaf", SCALABLE(4), MASKED, "_ZGVsMxv")
@@ -851,6 +887,11 @@ TLI_DEFINE_VECFUNC("acosf", "armpl_vacosq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v
 TLI_DEFINE_VECFUNC("acos", "armpl_svacos_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("acosf", "armpl_svacos_f32_x", SCALABLE(4), MASKED, "_ZGVsMxv")
 
+TLI_DEFINE_VECFUNC("llvm.acos.f64", "armpl_vacosq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.acos.f32", "armpl_vacosq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.acos.f64", "armpl_svacos_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.acos.f32", "armpl_svacos_f32_x", SCALABLE(4), MASKED, "_ZGVsMxv")
+
 TLI_DEFINE_VECFUNC("acosh", "armpl_vacoshq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
 TLI_DEFINE_VECFUNC("acoshf", "armpl_vacoshq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
 TLI_DEFINE_VECFUNC("acosh", "armpl_svacosh_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
@@ -861,6 +902,11 @@ TLI_DEFINE_VECFUNC("asinf", "armpl_vasinq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v
 TLI_DEFINE_VECFUNC("asin", "armpl_svasin_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("asinf", "armpl_svasin_f32_x", SCALABLE(4), MASKED, "_ZGVsMxv")
 
+TLI_DEFINE_VECFUNC("llvm.asin.f64", "armpl_vasinq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.asin.f32", "armpl_vasinq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.asin.f64", "armpl_svasin_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.asin.f32", "armpl_svasin_f32_x", SCALABLE(4), MASKED, "_ZGVsMxv")
+
 TLI_DEFINE_VECFUNC("asinh", "armpl_vasinhq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
 TLI_DEFINE_VECFUNC("asinhf", "armpl_vasinhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
 TLI_DEFINE_VECFUNC("asinh", "armpl_svasinh_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
@@ -871,6 +917,11 @@ TLI_DEFINE_VECFUNC("atanf", "armpl_vatanq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v
 TLI_DEFINE_VECFUNC("atan", "armpl_svatan_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("atanf", "armpl_svatan_f32_x", SCALABLE(4), MASKED, "_ZGVsMxv")
 
+TLI_DEFINE_VECFUNC("llvm.atan.f64", "armpl_vatanq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.atan.f32", "armpl_vatanq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.atan.f64", "armpl_svatan_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.atan.f32", "armpl_svatan_f32_x", SCALABLE(4), MASKED, "_ZGVsMxv")
+
 TLI_DEFINE_VECFUNC("atan2", "armpl_vatan2q_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2vv")
 TLI_DEFINE_VECFUNC("atan2f", "armpl_vatan2q_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4vv")
 TLI_DEFINE_VECFUNC("atan2", "armpl_svatan2_f64_x", SCALABLE(2), MASKED, "_ZGVsMxvv")
@@ -906,6 +957,11 @@ TLI_DEFINE_VECFUNC("coshf", "armpl_vcoshq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v
 TLI_DEFINE_VECFUNC("cosh", "armpl_svcosh_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("coshf", "armpl_svcosh_f32_x", SCALABLE(4), MASKED, "_ZGVsMxv")
 
+TLI_DEFINE_VECFUNC("llvm.cosh.f64", "armpl_vcoshq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.cosh.f32", "armpl_vcoshq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.cosh.f64", "armpl_svcosh_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.cosh.f32", "armpl_svcosh_f32_x", SCALABLE(4), MASKED, "_ZGVsMxv")
+
 TLI_DEFINE_VECFUNC("cospi", "armpl_vcospiq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
 TLI_DEFINE_VECFUNC("cospif", "armpl_vcospiq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
 TLI_DEFINE_VECFUNC("cospi", "armpl_svcospi_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
@@ -1081,6 +1137,11 @@ TLI_DEFINE_VECFUNC("sinhf", "armpl_vsinhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v
 TLI_DEFINE_VECFUNC("sinh", "armpl_svsinh_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("sinhf", "armpl_svsinh_f32_x", SCALABLE(4), MASKED, "_ZGVsMxv")
 
+TLI_DEFINE_VECFUNC("llvm.sinh.f64", "armpl_vsinhq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.sinh.f32", "armpl_vsinhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.sinh.f64", "armpl_svsinh_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.sinh.f32", "armpl_svsinh_f32_x", SCALABLE(4), MASKED, "_ZGVsMxv")
+
 TLI_DEFINE_VECFUNC("sinpi", "armpl_vsinpiq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
 TLI_DEFINE_VECFUNC("sinpif", "armpl_vsinpiq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
 TLI_DEFINE_VECFUNC("sinpi", "armpl_svsinpi_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
@@ -1106,6 +1167,11 @@ TLI_DEFINE_VECFUNC("tanhf", "armpl_vtanhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v
 TLI_DEFINE_VECFUNC("tanh", "armpl_svtanh_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
 TLI_DEFINE_VECFUNC("tanhf", "armpl_svtanh_f32_x", SCALABLE(4), MASKED, "_ZGVsMxv")
 
+TLI_DEFINE_VECFUNC("llvm.tanh.f64", "armpl_vtanhq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
+TLI_DEFINE_VECFUNC("llvm.tanh.f32", "armpl_vtanhq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
+TLI_DEFINE_VECFUNC("llvm.tanh.f64", "armpl_svtanh_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
+TLI_DEFINE_VECFUNC("llvm.tanh.f32", "armpl_svtanh_f32_x", SCALABLE(4), MASKED, "_ZGVsMxv")
+
 TLI_DEFINE_VECFUNC("tgamma", "armpl_vtgammaq_f64", FIXED(2), NOMASK, "_ZGV_LLVM_N2v")
 TLI_DEFINE_VECFUNC("tgammaf", "armpl_vtgammaq_f32", FIXED(4), NOMASK, "_ZGV_LLVM_N4v")
 TLI_DEFINE_VECFUNC("tgamma", "armpl_svtgamma_f64_x", SCALABLE(2), MASKED, "_ZGVsMxv")
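For context, a minimal sketch of the kind of code these new mappings are meant to cover. This example is not part of the change; the flag spellings (`-fveclib=ArmPL`, `-fno-math-errno`) and the assumption that the `acos()` call is lowered to the `llvm.acos.f64` intrinsic are based on common clang behavior and may differ per toolchain:

```c
/* Hypothetical example: a scalar loop over acos(). When the libm call is
 * lowered to the llvm.acos.f64 intrinsic (e.g. with math errno disabled),
 * the TLI entries added above let the loop vectorizer replace it with a
 * vector routine such as armpl_vacosq_f64 (NEON) or _ZGVsMxv_acos (SVE). */
#include <math.h>

void acos_all(double *restrict out, const double *restrict in, int n) {
  for (int i = 0; i < n; ++i)
    out[i] = acos(in[i]);
}

/* Assumed invocation (AArch64, Arm Performance Libraries):
 *   clang -O2 -fno-math-errno -fveclib=ArmPL --target=aarch64-linux-gnu -S acos_all.c */
```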