/usr/lib/gcc-cross/i686-linux-gnu/6/plugin/include/insn-flags.h is part of the package gcc-6-plugin-dev-i686-linux-gnu, version 6.4.0-17ubuntu1cross1.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
11921 11922 11923 11924 11925 11926 11927 11928 11929 11930 11931 11932 11933 11934 11935 11936 11937 11938 11939 11940 11941 11942 11943 11944 11945 11946 11947 11948 11949 11950 11951 11952 11953 11954 11955 11956 11957 11958 11959 11960 11961 11962 11963 11964 11965 11966 11967 11968 11969 11970 11971 11972 11973 11974 11975 11976 11977 11978 11979 11980 11981 11982 11983 11984 11985 11986 11987 11988 11989 11990 11991 11992 11993 11994 11995 11996 11997 11998 11999 12000 12001 12002 12003 12004 12005 12006 12007 12008 12009 12010 12011 12012 12013 12014 12015 12016 12017 12018 12019 12020 12021 12022 12023 12024 12025 12026 12027 12028 12029 12030 12031 12032 12033 12034 12035 12036 12037 12038 12039 12040 12041 12042 12043 12044 12045 12046 12047 12048 12049 12050 12051 12052 12053 12054 12055 12056 12057 12058 12059 12060 12061 12062 12063 12064 12065 12066 12067 12068 12069 12070 12071 12072 12073 12074 12075 12076 12077 12078 12079 12080 12081 12082 12083 12084 12085 12086 12087 12088 12089 12090 12091 12092 12093 12094 12095 12096 12097 12098 12099 12100 12101 12102 12103 12104 12105 12106 12107 12108 12109 12110 12111 12112 12113 12114 12115 12116 12117 12118 12119 12120 12121 12122 12123 12124 12125 12126 12127 12128 12129 12130 12131 12132 12133 12134 12135 12136 12137 12138 12139 12140 12141 12142 12143 12144 12145 12146 12147 12148 12149 12150 12151 12152 12153 12154 12155 12156 12157 12158 12159 12160 12161 12162 12163 12164 12165 12166 12167 12168 12169 12170 12171 12172 12173 12174 12175 12176 12177 12178 12179 12180 12181 12182 12183 12184 12185 12186 12187 12188 12189 12190 12191 12192 12193 12194 12195 12196 12197 12198 12199 12200 12201 12202 12203 12204 12205 12206 12207 12208 12209 12210 12211 12212 12213 12214 12215 12216 12217 12218 12219 12220 12221 12222 12223 12224 12225 12226 12227 12228 12229 12230 12231 12232 12233 12234 12235 12236 12237 12238 12239 12240 12241 12242 12243 12244 12245 12246 12247 12248 12249 12250 12251 12252 12253 12254 12255 12256 12257 12258 12259 12260 12261 12262 12263 12264 12265 12266 12267 12268 12269 12270 12271 12272 12273 12274 12275 12276 12277 12278 12279 12280 12281 12282 12283 12284 12285 12286 12287 12288 12289 12290 12291 12292 12293 12294 12295 12296 12297 12298 12299 12300 12301 12302 12303 12304 12305 12306 12307 12308 12309 12310 12311 12312 12313 12314 12315 12316 12317 12318 12319 12320 12321 12322 12323 12324 12325 12326 12327 12328 12329 12330 12331 12332 12333 12334 12335 12336 12337 12338 12339 12340 12341 12342 12343 12344 12345 12346 12347 12348 12349 12350 12351 12352 12353 12354 12355 12356 12357 12358 12359 12360 12361 12362 12363 12364 12365 12366 12367 12368 12369 12370 12371 12372 12373 12374 12375 12376 12377 12378 12379 12380 12381 12382 12383 12384 12385 12386 12387 12388 12389 12390 12391 12392 12393 12394 12395 12396 12397 12398 12399 12400 12401 12402 12403 12404 12405 12406 12407 12408 12409 12410 12411 12412 12413 12414 | /* Generated automatically by the program `genflags'
from the machine description file `md'. */
#ifndef GCC_INSN_FLAGS_H
#define GCC_INSN_FLAGS_H
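/* Editor's note, not part of the genflags output: each HAVE_<pattern>
   macro below expands to the C condition under which the named insn
   pattern from the i386 machine description `md' is available, so GCC
   code tests it at run time rather than with #ifdef.  A minimal sketch
   of the usual idiom, assuming rtx values op0 and op1 have been set up
   (the operand names are illustrative only):

     if (HAVE_sqrtxf2)
       emit_insn (gen_sqrtxf2 (op0, op1));

   Here HAVE_sqrtxf2 evaluates TARGET_USE_FANCY_MATH_387, and
   gen_sqrtxf2 is the corresponding generated gen_* function.  */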
#define HAVE_x86_fnstsw_1 (TARGET_80387)
#define HAVE_x86_sahf_1 (TARGET_SAHF)
#define HAVE_kmovw (!(MEM_P (operands[0]) && MEM_P (operands[1])) && TARGET_AVX512F)
#define HAVE_insvhi_1 1
#define HAVE_insvsi_1 1
#define HAVE_insvdi_1 (TARGET_64BIT)
#define HAVE_swapxf (TARGET_80387)
#define HAVE_zero_extendqidi2 (TARGET_64BIT)
#define HAVE_zero_extendhidi2 (TARGET_64BIT)
#define HAVE_zero_extendqisi2_and (TARGET_ZERO_EXTEND_WITH_AND && optimize_function_for_speed_p (cfun))
#define HAVE_zero_extendhisi2_and (TARGET_ZERO_EXTEND_WITH_AND && optimize_function_for_speed_p (cfun))
#define HAVE_zero_extendqihi2_and (TARGET_ZERO_EXTEND_WITH_AND && optimize_function_for_speed_p (cfun))
#define HAVE_extendsidi2_1 (!TARGET_64BIT)
#define HAVE_extendqidi2 (TARGET_64BIT)
#define HAVE_extendhidi2 (TARGET_64BIT)
#define HAVE_extendhisi2 1
#define HAVE_extendqisi2 1
#define HAVE_extendqihi2 1
#define HAVE_truncxfsf2_i387_noop (TARGET_80387 && flag_unsafe_math_optimizations)
#define HAVE_truncxfdf2_i387_noop (TARGET_80387 && flag_unsafe_math_optimizations)
#define HAVE_fix_truncsfsi_sse (SSE_FLOAT_MODE_P (SFmode) \
&& (!TARGET_FISTTP || TARGET_SSE_MATH))
#define HAVE_fix_truncsfdi_sse ((SSE_FLOAT_MODE_P (SFmode) \
&& (!TARGET_FISTTP || TARGET_SSE_MATH)) && (TARGET_64BIT))
#define HAVE_fix_truncdfsi_sse (SSE_FLOAT_MODE_P (DFmode) \
&& (!TARGET_FISTTP || TARGET_SSE_MATH))
#define HAVE_fix_truncdfdi_sse ((SSE_FLOAT_MODE_P (DFmode) \
&& (!TARGET_FISTTP || TARGET_SSE_MATH)) && (TARGET_64BIT))
#define HAVE_fix_trunchi_fisttp_i387_1 (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& TARGET_FISTTP \
&& !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& (TARGET_64BIT || HImode != DImode)) \
&& TARGET_SSE_MATH) \
&& can_create_pseudo_p ())
#define HAVE_fix_truncsi_fisttp_i387_1 (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& TARGET_FISTTP \
&& !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& (TARGET_64BIT || SImode != DImode)) \
&& TARGET_SSE_MATH) \
&& can_create_pseudo_p ())
#define HAVE_fix_truncdi_fisttp_i387_1 (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& TARGET_FISTTP \
&& !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& (TARGET_64BIT || DImode != DImode)) \
&& TARGET_SSE_MATH) \
&& can_create_pseudo_p ())
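/* Editor's note: conditions such as (TARGET_64BIT || HImode != DImode)
   above, and the later (SFmode == TFmode) tests, look degenerate because
   genflags substitutes a concrete mode for each mode-iterator instance;
   the comparisons then fold to compile-time constants (DImode != DImode
   is always false, so the DImode variants reduce to TARGET_64BIT).  */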
#define HAVE_fix_trunchi_i387_fisttp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& TARGET_FISTTP \
&& !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& (TARGET_64BIT || HImode != DImode)) \
&& TARGET_SSE_MATH))
#define HAVE_fix_truncsi_i387_fisttp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& TARGET_FISTTP \
&& !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& (TARGET_64BIT || SImode != DImode)) \
&& TARGET_SSE_MATH))
#define HAVE_fix_truncdi_i387_fisttp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& TARGET_FISTTP \
&& !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& (TARGET_64BIT || DImode != DImode)) \
&& TARGET_SSE_MATH))
#define HAVE_fix_trunchi_i387_fisttp_with_temp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& TARGET_FISTTP \
&& !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& (TARGET_64BIT || HImode != DImode)) \
&& TARGET_SSE_MATH))
#define HAVE_fix_truncsi_i387_fisttp_with_temp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& TARGET_FISTTP \
&& !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& (TARGET_64BIT || SImode != DImode)) \
&& TARGET_SSE_MATH))
#define HAVE_fix_truncdi_i387_fisttp_with_temp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& TARGET_FISTTP \
&& !((SSE_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& (TARGET_64BIT || DImode != DImode)) \
&& TARGET_SSE_MATH))
#define HAVE_fix_truncdi_i387 (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& !TARGET_FISTTP \
&& !(TARGET_64BIT && SSE_FLOAT_MODE_P (GET_MODE (operands[1]))))
#define HAVE_fix_truncdi_i387_with_temp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& !TARGET_FISTTP \
&& !(TARGET_64BIT && SSE_FLOAT_MODE_P (GET_MODE (operands[1]))))
#define HAVE_fix_trunchi_i387 (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& !TARGET_FISTTP \
&& !SSE_FLOAT_MODE_P (GET_MODE (operands[1])))
#define HAVE_fix_truncsi_i387 (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& !TARGET_FISTTP \
&& !SSE_FLOAT_MODE_P (GET_MODE (operands[1])))
#define HAVE_fix_trunchi_i387_with_temp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& !TARGET_FISTTP \
&& !SSE_FLOAT_MODE_P (GET_MODE (operands[1])))
#define HAVE_fix_truncsi_i387_with_temp (X87_FLOAT_MODE_P (GET_MODE (operands[1])) \
&& !TARGET_FISTTP \
&& !SSE_FLOAT_MODE_P (GET_MODE (operands[1])))
#define HAVE_x86_fnstcw_1 (TARGET_80387)
#define HAVE_x86_fldcw_1 (TARGET_80387)
#define HAVE_floathisf2 (TARGET_80387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387))
#define HAVE_floathidf2 (TARGET_80387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387))
#define HAVE_floathixf2 (TARGET_80387 \
&& (!(SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387))
#define HAVE_floatsixf2 (TARGET_80387)
#define HAVE_floatdixf2 (TARGET_80387)
#define HAVE_floatdisf2_i387_with_xmm (TARGET_80387 && X87_ENABLE_FLOAT (SFmode, DImode) \
&& TARGET_SSE2 && TARGET_INTER_UNIT_MOVES_TO_VEC \
&& !TARGET_64BIT && optimize_function_for_speed_p (cfun))
#define HAVE_floatdidf2_i387_with_xmm (TARGET_80387 && X87_ENABLE_FLOAT (DFmode, DImode) \
&& TARGET_SSE2 && TARGET_INTER_UNIT_MOVES_TO_VEC \
&& !TARGET_64BIT && optimize_function_for_speed_p (cfun))
#define HAVE_floatdixf2_i387_with_xmm (TARGET_80387 && X87_ENABLE_FLOAT (XFmode, DImode) \
&& TARGET_SSE2 && TARGET_INTER_UNIT_MOVES_TO_VEC \
&& !TARGET_64BIT && optimize_function_for_speed_p (cfun))
#define HAVE_addsi_1_zext (TARGET_64BIT && ix86_binary_operator_ok (PLUS, SImode, operands))
#define HAVE_addqi_ext_1 1
#define HAVE_addqi3_carry (ix86_binary_operator_ok (PLUS, QImode, operands))
#define HAVE_addhi3_carry (ix86_binary_operator_ok (PLUS, HImode, operands))
#define HAVE_addsi3_carry (ix86_binary_operator_ok (PLUS, SImode, operands))
#define HAVE_adddi3_carry ((ix86_binary_operator_ok (PLUS, DImode, operands)) && (TARGET_64BIT))
#define HAVE_addcarrysi (ix86_binary_operator_ok (PLUS, SImode, operands))
#define HAVE_addcarrydi ((ix86_binary_operator_ok (PLUS, DImode, operands)) && (TARGET_64BIT))
#define HAVE_subqi3_carry (ix86_binary_operator_ok (MINUS, QImode, operands))
#define HAVE_subhi3_carry (ix86_binary_operator_ok (MINUS, HImode, operands))
#define HAVE_subsi3_carry (ix86_binary_operator_ok (MINUS, SImode, operands))
#define HAVE_subdi3_carry ((ix86_binary_operator_ok (MINUS, DImode, operands)) && (TARGET_64BIT))
#define HAVE_subborrowsi (ix86_binary_operator_ok (MINUS, SImode, operands))
#define HAVE_subborrowdi ((ix86_binary_operator_ok (MINUS, DImode, operands)) && (TARGET_64BIT))
#define HAVE_divmodsi4_1 1
#define HAVE_divmoddi4_1 (TARGET_64BIT)
#define HAVE_divmodhiqi3 (TARGET_QIMODE_MATH)
#define HAVE_udivmodsi4_1 1
#define HAVE_udivmoddi4_1 (TARGET_64BIT)
#define HAVE_udivmodhiqi3 (TARGET_QIMODE_MATH)
#define HAVE_kandnqi (TARGET_AVX512F)
#define HAVE_kandnhi (TARGET_AVX512F)
#define HAVE_andqi_ext_0 1
#define HAVE_kxnorqi (TARGET_AVX512F)
#define HAVE_kxnorhi (TARGET_AVX512F)
#define HAVE_kxnorsi (TARGET_AVX512BW)
#define HAVE_kxnordi (TARGET_AVX512BW)
#define HAVE_kortestzhi (TARGET_AVX512F && ix86_match_ccmode (insn, CCZmode))
#define HAVE_kortestchi (TARGET_AVX512F && ix86_match_ccmode (insn, CCCmode))
#define HAVE_kunpckhi (TARGET_AVX512F)
#define HAVE_kunpcksi (TARGET_AVX512BW)
#define HAVE_kunpckdi (TARGET_AVX512BW)
#define HAVE_copysignsf3_const ((SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| (TARGET_SSE && (SFmode == TFmode)))
#define HAVE_copysigndf3_const ((SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| (TARGET_SSE && (DFmode == TFmode)))
#define HAVE_copysigntf3_const ((SSE_FLOAT_MODE_P (TFmode) && TARGET_SSE_MATH) \
|| (TARGET_SSE && (TFmode == TFmode)))
#define HAVE_copysignsf3_var ((SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| (TARGET_SSE && (SFmode == TFmode)))
#define HAVE_copysigndf3_var ((SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| (TARGET_SSE && (DFmode == TFmode)))
#define HAVE_copysigntf3_var ((SSE_FLOAT_MODE_P (TFmode) && TARGET_SSE_MATH) \
|| (TARGET_SSE && (TFmode == TFmode)))
#define HAVE_x86_64_shld (TARGET_64BIT)
#define HAVE_x86_shld 1
#define HAVE_x86_64_shrd (TARGET_64BIT)
#define HAVE_x86_shrd 1
#define HAVE_ashrdi3_cvt (TARGET_64BIT && INTVAL (operands[2]) == 63 \
&& (TARGET_USE_CLTD || optimize_function_for_size_p (cfun)) \
&& ix86_binary_operator_ok (ASHIFTRT, DImode, operands))
#define HAVE_ashrsi3_cvt (INTVAL (operands[2]) == 31 \
&& (TARGET_USE_CLTD || optimize_function_for_size_p (cfun)) \
&& ix86_binary_operator_ok (ASHIFTRT, SImode, operands))
#define HAVE_ix86_rotldi3_doubleword (!TARGET_64BIT)
#define HAVE_ix86_rotlti3_doubleword (TARGET_64BIT)
#define HAVE_ix86_rotrdi3_doubleword (!TARGET_64BIT)
#define HAVE_ix86_rotrti3_doubleword (TARGET_64BIT)
#define HAVE_setcc_sf_sse (SSE_FLOAT_MODE_P (SFmode))
#define HAVE_setcc_df_sse (SSE_FLOAT_MODE_P (DFmode))
#define HAVE_jump 1
#define HAVE_blockage 1
#define HAVE_prologue_use 1
#define HAVE_simple_return_internal (reload_completed)
#define HAVE_simple_return_internal_long (reload_completed)
#define HAVE_simple_return_pop_internal (reload_completed)
#define HAVE_simple_return_indirect_internal (reload_completed)
#define HAVE_nop 1
#define HAVE_nops (reload_completed)
#define HAVE_pad 1
#define HAVE_set_got_rex64 (TARGET_64BIT)
#define HAVE_set_rip_rex64 (TARGET_64BIT)
#define HAVE_set_got_offset_rex64 (TARGET_LP64)
#define HAVE_eh_return_internal 1
#define HAVE_leave (!TARGET_64BIT)
#define HAVE_leave_rex64 (TARGET_64BIT)
#define HAVE_split_stack_return 1
#define HAVE_ffssi2_no_cmove (!TARGET_CMOVE)
#define HAVE_bmi_bextr_si (TARGET_BMI)
#define HAVE_bmi_bextr_di ((TARGET_BMI) && (TARGET_64BIT))
#define HAVE_bmi2_pdep_si3 (TARGET_BMI2)
#define HAVE_bmi2_pdep_di3 ((TARGET_BMI2) && (TARGET_64BIT))
#define HAVE_bmi2_pext_si3 (TARGET_BMI2)
#define HAVE_bmi2_pext_di3 ((TARGET_BMI2) && (TARGET_64BIT))
#define HAVE_tbm_bextri_si (TARGET_TBM)
#define HAVE_tbm_bextri_di ((TARGET_TBM) && (TARGET_64BIT))
#define HAVE_bsr_rex64 (TARGET_64BIT)
#define HAVE_bsr 1
#define HAVE_bswaphi_lowpart 1
#define HAVE_paritydi2_cmp (! TARGET_POPCNT)
#define HAVE_paritysi2_cmp (! TARGET_POPCNT)
#define HAVE_truncxfsf2_i387_noop_unspec (TARGET_USE_FANCY_MATH_387)
#define HAVE_truncxfdf2_i387_noop_unspec (TARGET_USE_FANCY_MATH_387)
#define HAVE_sqrtxf2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_sqrt_extendsfxf2_i387 (TARGET_USE_FANCY_MATH_387)
#define HAVE_sqrt_extenddfxf2_i387 (TARGET_USE_FANCY_MATH_387)
#define HAVE_fpremxf4_i387 (TARGET_USE_FANCY_MATH_387 \
&& flag_finite_math_only)
#define HAVE_fprem1xf4_i387 (TARGET_USE_FANCY_MATH_387 \
&& flag_finite_math_only)
#define HAVE_sincosxf3 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_sincos_extendsfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_sincos_extenddfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_fptanxf4_i387 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations \
&& standard_80387_constant_p (operands[3]) == 2)
#define HAVE_fptan_extendsfxf4_i387 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations \
&& standard_80387_constant_p (operands[3]) == 2)
#define HAVE_fptan_extenddfxf4_i387 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations \
&& standard_80387_constant_p (operands[3]) == 2)
#define HAVE_fpatan_extendsfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_fpatan_extenddfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_fyl2xxf3_i387 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fyl2x_extendsfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_fyl2x_extenddfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_fyl2xp1xf3_i387 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fyl2xp1_extendsfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_fyl2xp1_extenddfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_fxtractxf3_i387 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fxtract_extendsfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_fxtract_extenddfxf3_i387 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_fscalexf4_i387 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_sse4_1_roundsf2 (TARGET_ROUND)
#define HAVE_sse4_1_rounddf2 (TARGET_ROUND)
#define HAVE_rintxf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fistdi2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_fistdi2_with_temp (TARGET_USE_FANCY_MATH_387)
#define HAVE_fisthi2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_fistsi2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_fisthi2_with_temp (TARGET_USE_FANCY_MATH_387)
#define HAVE_fistsi2_with_temp (TARGET_USE_FANCY_MATH_387)
#define HAVE_frndintxf2_floor (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations \
&& can_create_pseudo_p ())
#define HAVE_frndintxf2_ceil (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations \
&& can_create_pseudo_p ())
#define HAVE_frndintxf2_trunc (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations \
&& can_create_pseudo_p ())
#define HAVE_frndintxf2_floor_i387 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_frndintxf2_ceil_i387 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_frndintxf2_trunc_i387 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_frndintxf2_mask_pm (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations \
&& can_create_pseudo_p ())
#define HAVE_frndintxf2_mask_pm_i387 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fistdi2_floor (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fistdi2_ceil (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fistdi2_floor_with_temp (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fistdi2_ceil_with_temp (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fisthi2_floor (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fisthi2_ceil (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fistsi2_floor (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fistsi2_ceil (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fisthi2_floor_with_temp (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fisthi2_ceil_with_temp (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fistsi2_floor_with_temp (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fistsi2_ceil_with_temp (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_fxamsf2_i387 (TARGET_USE_FANCY_MATH_387)
#define HAVE_fxamdf2_i387 (TARGET_USE_FANCY_MATH_387)
#define HAVE_fxamxf2_i387 (TARGET_USE_FANCY_MATH_387)
#define HAVE_fxamsf2_i387_with_temp (TARGET_USE_FANCY_MATH_387 \
&& can_create_pseudo_p ())
#define HAVE_fxamdf2_i387_with_temp (TARGET_USE_FANCY_MATH_387 \
&& can_create_pseudo_p ())
#define HAVE_movmsk_df (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)
#define HAVE_cld 1
#define HAVE_smaxsf3 (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH)
#define HAVE_sminsf3 (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH)
#define HAVE_smaxdf3 (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)
#define HAVE_smindf3 (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)
#define HAVE_pro_epilogue_adjust_stack_si_add (Pmode == SImode)
#define HAVE_pro_epilogue_adjust_stack_di_add (Pmode == DImode)
#define HAVE_pro_epilogue_adjust_stack_si_sub (Pmode == SImode)
#define HAVE_pro_epilogue_adjust_stack_di_sub (Pmode == DImode)
#define HAVE_allocate_stack_worker_probe_si ((ix86_target_stack_probe ()) && (Pmode == SImode))
#define HAVE_allocate_stack_worker_probe_di ((ix86_target_stack_probe ()) && (Pmode == DImode))
#define HAVE_adjust_stack_and_probesi (Pmode == SImode)
#define HAVE_adjust_stack_and_probedi (Pmode == DImode)
#define HAVE_probe_stack_rangesi (Pmode == SImode)
#define HAVE_probe_stack_rangedi (Pmode == DImode)
#define HAVE_trap 1
#define HAVE_stack_protect_set_si ((TARGET_SSP_TLS_GUARD) && (ptr_mode == SImode))
#define HAVE_stack_protect_set_di ((TARGET_SSP_TLS_GUARD) && (ptr_mode == DImode))
#define HAVE_stack_tls_protect_set_si (ptr_mode == SImode)
#define HAVE_stack_tls_protect_set_di (ptr_mode == DImode)
#define HAVE_stack_protect_test_si ((TARGET_SSP_TLS_GUARD) && (ptr_mode == SImode))
#define HAVE_stack_protect_test_di ((TARGET_SSP_TLS_GUARD) && (ptr_mode == DImode))
#define HAVE_stack_tls_protect_test_si (ptr_mode == SImode)
#define HAVE_stack_tls_protect_test_di (ptr_mode == DImode)
#define HAVE_sse4_2_crc32qi (TARGET_SSE4_2 || TARGET_CRC32)
#define HAVE_sse4_2_crc32hi (TARGET_SSE4_2 || TARGET_CRC32)
#define HAVE_sse4_2_crc32si (TARGET_SSE4_2 || TARGET_CRC32)
#define HAVE_sse4_2_crc32di (TARGET_64BIT && (TARGET_SSE4_2 || TARGET_CRC32))
#define HAVE_rdpmc (!TARGET_64BIT)
#define HAVE_rdpmc_rex64 (TARGET_64BIT)
#define HAVE_rdtsc (!TARGET_64BIT)
#define HAVE_rdtsc_rex64 (TARGET_64BIT)
#define HAVE_rdtscp (!TARGET_64BIT)
#define HAVE_rdtscp_rex64 (TARGET_64BIT)
#define HAVE_fxsave (TARGET_FXSR)
#define HAVE_fxsave64 (TARGET_64BIT && TARGET_FXSR)
#define HAVE_fxrstor (TARGET_FXSR)
#define HAVE_fxrstor64 (TARGET_64BIT && TARGET_FXSR)
#define HAVE_xsave (!TARGET_64BIT && TARGET_XSAVE)
#define HAVE_xsaveopt ((!TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVEOPT))
#define HAVE_xsavec ((!TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVEC))
#define HAVE_xsaves ((!TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVES))
#define HAVE_xsave_rex64 (TARGET_64BIT && TARGET_XSAVE)
#define HAVE_xsaveopt_rex64 ((TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVEOPT))
#define HAVE_xsavec_rex64 ((TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVEC))
#define HAVE_xsaves_rex64 ((TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVES))
#define HAVE_xsave64 (TARGET_64BIT && TARGET_XSAVE)
#define HAVE_xsaveopt64 ((TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVEOPT))
#define HAVE_xsavec64 ((TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVEC))
#define HAVE_xsaves64 ((TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVES))
#define HAVE_xrstor (!TARGET_64BIT && TARGET_XSAVE)
#define HAVE_xrstors ((!TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVES))
#define HAVE_xrstor_rex64 (TARGET_64BIT && TARGET_XSAVE)
#define HAVE_xrstors_rex64 ((TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVES))
#define HAVE_xrstor64 (TARGET_64BIT && TARGET_XSAVE)
#define HAVE_xrstors64 ((TARGET_64BIT && TARGET_XSAVE) && (TARGET_XSAVES))
#define HAVE_fnstenv (TARGET_80387)
#define HAVE_fldenv (TARGET_80387)
#define HAVE_fnstsw (TARGET_80387)
#define HAVE_fnclex (TARGET_80387)
#define HAVE_lwp_slwpcbsi ((TARGET_LWP) && (Pmode == SImode))
#define HAVE_lwp_slwpcbdi ((TARGET_LWP) && (Pmode == DImode))
#define HAVE_rdfsbasesi (TARGET_64BIT && TARGET_FSGSBASE)
#define HAVE_rdgsbasesi (TARGET_64BIT && TARGET_FSGSBASE)
#define HAVE_rdfsbasedi ((TARGET_64BIT && TARGET_FSGSBASE) && (TARGET_64BIT))
#define HAVE_rdgsbasedi ((TARGET_64BIT && TARGET_FSGSBASE) && (TARGET_64BIT))
#define HAVE_wrfsbasesi (TARGET_64BIT && TARGET_FSGSBASE)
#define HAVE_wrgsbasesi (TARGET_64BIT && TARGET_FSGSBASE)
#define HAVE_wrfsbasedi ((TARGET_64BIT && TARGET_FSGSBASE) && (TARGET_64BIT))
#define HAVE_wrgsbasedi ((TARGET_64BIT && TARGET_FSGSBASE) && (TARGET_64BIT))
#define HAVE_rdrandhi_1 (TARGET_RDRND)
#define HAVE_rdrandsi_1 (TARGET_RDRND)
#define HAVE_rdranddi_1 ((TARGET_RDRND) && (TARGET_64BIT))
#define HAVE_rdseedhi_1 (TARGET_RDSEED)
#define HAVE_rdseedsi_1 (TARGET_RDSEED)
#define HAVE_rdseeddi_1 ((TARGET_RDSEED) && (TARGET_64BIT))
#define HAVE_xbegin_1 (TARGET_RTM)
#define HAVE_xend (TARGET_RTM)
#define HAVE_xabort (TARGET_RTM)
#define HAVE_xtest_1 (TARGET_RTM)
#define HAVE_clwb (TARGET_CLWB)
#define HAVE_clflushopt (TARGET_CLFLUSHOPT)
#define HAVE_mwaitx (TARGET_MWAITX)
#define HAVE_monitorx_si ((TARGET_MWAITX) && (Pmode == SImode))
#define HAVE_monitorx_di ((TARGET_MWAITX) && (Pmode == DImode))
#define HAVE_clzero_si ((TARGET_CLZERO) && (Pmode == SImode))
#define HAVE_clzero_di ((TARGET_CLZERO) && (Pmode == DImode))
#define HAVE_move_size_reloc_si (TARGET_MPX)
#define HAVE_move_size_reloc_di ((TARGET_MPX) && (TARGET_64BIT))
#define HAVE_sse_movntq (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_ieee_maxv2sf3 (TARGET_3DNOW)
#define HAVE_mmx_ieee_minv2sf3 (TARGET_3DNOW)
#define HAVE_mmx_rcpv2sf2 (TARGET_3DNOW)
#define HAVE_mmx_rcpit1v2sf3 (TARGET_3DNOW)
#define HAVE_mmx_rcpit2v2sf3 (TARGET_3DNOW)
#define HAVE_mmx_rsqrtv2sf2 (TARGET_3DNOW)
#define HAVE_mmx_rsqit1v2sf3 (TARGET_3DNOW)
#define HAVE_mmx_haddv2sf3 (TARGET_3DNOW)
#define HAVE_mmx_hsubv2sf3 (TARGET_3DNOW_A)
#define HAVE_mmx_addsubv2sf3 (TARGET_3DNOW_A)
#define HAVE_mmx_gtv2sf3 (TARGET_3DNOW)
#define HAVE_mmx_gev2sf3 (TARGET_3DNOW)
#define HAVE_mmx_pf2id (TARGET_3DNOW)
#define HAVE_mmx_pf2iw (TARGET_3DNOW_A)
#define HAVE_mmx_pi2fw (TARGET_3DNOW_A)
#define HAVE_mmx_floatv2si2 (TARGET_3DNOW)
#define HAVE_mmx_pswapdv2sf2 (TARGET_3DNOW_A)
#define HAVE_mmx_ashrv4hi3 (TARGET_MMX)
#define HAVE_mmx_ashrv2si3 (TARGET_MMX)
#define HAVE_mmx_ashlv4hi3 (TARGET_MMX)
#define HAVE_mmx_lshrv4hi3 (TARGET_MMX)
#define HAVE_mmx_ashlv2si3 (TARGET_MMX)
#define HAVE_mmx_lshrv2si3 (TARGET_MMX)
#define HAVE_mmx_ashlv1di3 (TARGET_MMX)
#define HAVE_mmx_lshrv1di3 (TARGET_MMX)
#define HAVE_mmx_gtv8qi3 (TARGET_MMX)
#define HAVE_mmx_gtv4hi3 (TARGET_MMX)
#define HAVE_mmx_gtv2si3 (TARGET_MMX)
#define HAVE_mmx_andnotv8qi3 (TARGET_MMX)
#define HAVE_mmx_andnotv4hi3 (TARGET_MMX)
#define HAVE_mmx_andnotv2si3 (TARGET_MMX)
#define HAVE_mmx_packsswb (TARGET_MMX)
#define HAVE_mmx_packssdw (TARGET_MMX)
#define HAVE_mmx_packuswb (TARGET_MMX)
#define HAVE_mmx_punpckhbw (TARGET_MMX)
#define HAVE_mmx_punpcklbw (TARGET_MMX)
#define HAVE_mmx_punpckhwd (TARGET_MMX)
#define HAVE_mmx_punpcklwd (TARGET_MMX)
#define HAVE_mmx_punpckhdq (TARGET_MMX)
#define HAVE_mmx_punpckldq (TARGET_MMX)
#define HAVE_mmx_pextrw (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_pshufw_1 (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_pswapdv2si2 (TARGET_3DNOW_A)
#define HAVE_mmx_psadbw (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_pmovmskb (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_avx512f_loadv16si_mask (TARGET_AVX512F)
#define HAVE_avx512vl_loadv8si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_loadv4si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_loadv8di_mask (TARGET_AVX512F)
#define HAVE_avx512vl_loadv4di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_loadv2di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_loadv16sf_mask (TARGET_AVX512F)
#define HAVE_avx512vl_loadv8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_loadv4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_loadv8df_mask (TARGET_AVX512F)
#define HAVE_avx512vl_loadv4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_loadv2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_loadv64qi_mask (TARGET_AVX512BW)
#define HAVE_avx512vl_loadv16qi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_loadv32qi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_loadv32hi_mask (TARGET_AVX512BW)
#define HAVE_avx512vl_loadv16hi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_loadv8hi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512f_blendmv16si (TARGET_AVX512F)
#define HAVE_avx512vl_blendmv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_blendmv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_blendmv8di (TARGET_AVX512F)
#define HAVE_avx512vl_blendmv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_blendmv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_blendmv16sf (TARGET_AVX512F)
#define HAVE_avx512vl_blendmv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_blendmv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_blendmv8df (TARGET_AVX512F)
#define HAVE_avx512vl_blendmv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_blendmv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_blendmv64qi (TARGET_AVX512BW)
#define HAVE_avx512vl_blendmv16qi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_blendmv32qi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_blendmv32hi (TARGET_AVX512BW)
#define HAVE_avx512vl_blendmv16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_blendmv8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512f_storev16si_mask (TARGET_AVX512F)
#define HAVE_avx512vl_storev8si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storev4si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_storev8di_mask (TARGET_AVX512F)
#define HAVE_avx512vl_storev4di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storev2di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_storev16sf_mask (TARGET_AVX512F)
#define HAVE_avx512vl_storev8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storev4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_storev8df_mask (TARGET_AVX512F)
#define HAVE_avx512vl_storev4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storev2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_storev64qi_mask (TARGET_AVX512BW)
#define HAVE_avx512vl_storev16qi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storev32qi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_storev32hi_mask (TARGET_AVX512BW)
#define HAVE_avx512vl_storev16hi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storev8hi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_sse2_movq128 (TARGET_SSE2)
#define HAVE_movdi_to_sse (!TARGET_64BIT && TARGET_SSE2 && TARGET_INTER_UNIT_MOVES_TO_VEC)
#define HAVE_avx512f_storeups512 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_avx_storeups256 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sse_storeups (TARGET_SSE)
#define HAVE_avx512f_storeupd512 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_avx_storeupd256 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sse2_storeupd ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_avx512f_storeups512_mask (TARGET_AVX512F)
#define HAVE_avx512vl_storeups256_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storeups_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_storeupd512_mask (TARGET_AVX512F)
#define HAVE_avx512vl_storeupd256_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storeupd_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx_storedquv32qi ((TARGET_SSE2) && (TARGET_AVX))
#define HAVE_sse2_storedquv16qi (TARGET_SSE2)
#define HAVE_avx512f_storedquv64qi (TARGET_AVX512BW)
#define HAVE_avx512bw_storedquv32hi (TARGET_AVX512BW)
#define HAVE_avx512vl_storedquv8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storedquv16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512f_storedquv16si (TARGET_AVX512F)
#define HAVE_avx_storedquv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_sse2_storedquv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_storedquv8di (TARGET_AVX512F)
#define HAVE_avx512vl_storedquv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storedquv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_storedquv16si_mask (TARGET_AVX512F)
#define HAVE_avx512vl_storedquv8si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storedquv4si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_storedquv8di_mask (TARGET_AVX512F)
#define HAVE_avx512vl_storedquv4di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storedquv2di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_storedquv64qi_mask (TARGET_AVX512BW)
#define HAVE_avx512vl_storedquv16qi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storedquv32qi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_storedquv32hi_mask (TARGET_AVX512BW)
#define HAVE_avx512vl_storedquv16hi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_storedquv8hi_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx_lddqu256 ((TARGET_SSE3) && (TARGET_AVX))
#define HAVE_sse3_lddqu (TARGET_SSE3)
#define HAVE_sse2_movntisi (TARGET_SSE2)
#define HAVE_sse2_movntidi ((TARGET_SSE2) && (TARGET_64BIT))
#define HAVE_avx512f_movntv16sf ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_avx_movntv8sf ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sse_movntv4sf (TARGET_SSE)
#define HAVE_avx512f_movntv8df ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_avx_movntv4df ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sse2_movntv2df ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_avx512f_movntv8di ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_avx_movntv4di ((TARGET_SSE2) && (TARGET_AVX))
#define HAVE_sse2_movntv2di (TARGET_SSE2)
#define HAVE_sse_vmaddv4sf3 (TARGET_SSE)
#define HAVE_sse_vmaddv4sf3_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse_vmsubv4sf3 (TARGET_SSE)
#define HAVE_sse_vmsubv4sf3_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse2_vmaddv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_sse2_vmaddv2df3_round ((TARGET_AVX512F) && ((TARGET_SSE) && (TARGET_SSE2)))
#define HAVE_sse2_vmsubv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_sse2_vmsubv2df3_round ((TARGET_AVX512F) && ((TARGET_SSE) && (TARGET_SSE2)))
#define HAVE_sse_vmmulv4sf3 (TARGET_SSE)
#define HAVE_sse_vmmulv4sf3_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse_vmdivv4sf3 (TARGET_SSE)
#define HAVE_sse_vmdivv4sf3_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse2_vmmulv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_sse2_vmmulv2df3_round ((TARGET_AVX512F) && ((TARGET_SSE) && (TARGET_SSE2)))
#define HAVE_sse2_vmdivv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_sse2_vmdivv2df3_round ((TARGET_AVX512F) && ((TARGET_SSE) && (TARGET_SSE2)))
#define HAVE_avx512f_divv16sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_divv16sf3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_avx512f_divv16sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_avx512f_divv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F))))
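/* Editor's note: the same constant folding appears in the AVX-512
   conditions below: (64 == 64 || TARGET_AVX512VL) comes from a
   vector-size test that is trivially true for 512-bit modes, and the
   long V16SFmode == ... disjunctions gate the _round variants, i.e.
   which modes accept an embedded rounding/SAE operand.  */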
#define HAVE_avx_divv8sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_avx_divv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_sse_divv4sf3 (TARGET_SSE && 1 && 1)
#define HAVE_sse_divv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_avx512f_divv8df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_divv8df3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_avx512f_divv8df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_avx512f_divv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_avx_divv4df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_avx_divv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_sse2_divv2df3 ((TARGET_SSE && 1 && 1) && (TARGET_SSE2))
#define HAVE_sse2_divv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
#define HAVE_avx_rcpv8sf2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sse_rcpv4sf2 (TARGET_SSE)
#define HAVE_sse_vmrcpv4sf2 (TARGET_SSE)
#define HAVE_rcp14v16sf_mask (TARGET_AVX512F)
#define HAVE_rcp14v8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_rcp14v4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_rcp14v8df_mask (TARGET_AVX512F)
#define HAVE_rcp14v4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_rcp14v2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_srcp14v4sf (TARGET_AVX512F)
#define HAVE_srcp14v2df ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_sqrtv16sf2 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_sqrtv16sf2_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_avx512f_sqrtv16sf2_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_avx512f_sqrtv16sf2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_avx_sqrtv8sf2 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_avx_sqrtv8sf2_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_sse_sqrtv4sf2 (TARGET_SSE && 1 && 1)
#define HAVE_sse_sqrtv4sf2_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_avx512f_sqrtv8df2 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_sqrtv8df2_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_avx512f_sqrtv8df2_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_avx512f_sqrtv8df2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_avx_sqrtv4df2 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_avx_sqrtv4df2_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_sse2_sqrtv2df2 ((TARGET_SSE && 1 && 1) && (TARGET_SSE2))
#define HAVE_sse2_sqrtv2df2_mask ((TARGET_AVX512F) && ((TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
#define HAVE_sse_vmsqrtv4sf2 (TARGET_SSE)
#define HAVE_sse_vmsqrtv4sf2_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse2_vmsqrtv2df2 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_sse2_vmsqrtv2df2_round ((TARGET_AVX512F) && ((TARGET_SSE) && (TARGET_SSE2)))
#define HAVE_avx_rsqrtv8sf2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sse_rsqrtv4sf2 (TARGET_SSE)
#define HAVE_rsqrt14v16sf_mask (TARGET_AVX512F)
#define HAVE_rsqrt14v8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_rsqrt14v4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_rsqrt14v8df_mask (TARGET_AVX512F)
#define HAVE_rsqrt14v4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_rsqrt14v2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_rsqrt14v4sf (TARGET_AVX512F)
#define HAVE_rsqrt14v2df ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_sse_vmrsqrtv4sf2 (TARGET_SSE)
#define HAVE_ieee_maxv16sf3 ((TARGET_SSE \
&& 1 && 1) && (TARGET_AVX512F))
#define HAVE_ieee_maxv16sf3_round ((TARGET_AVX512F) && ((TARGET_SSE \
&& 1 && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_ieee_maxv16sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
&& (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_ieee_maxv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE \
&& (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_ieee_minv16sf3 ((TARGET_SSE \
&& 1 && 1) && (TARGET_AVX512F))
#define HAVE_ieee_minv16sf3_round ((TARGET_AVX512F) && ((TARGET_SSE \
&& 1 && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_ieee_minv16sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
&& (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_ieee_minv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE \
&& (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_ieee_maxv8sf3 ((TARGET_SSE \
&& 1 && 1) && (TARGET_AVX))
#define HAVE_ieee_maxv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
&& (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_ieee_minv8sf3 ((TARGET_SSE \
&& 1 && 1) && (TARGET_AVX))
#define HAVE_ieee_minv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
&& (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_ieee_maxv4sf3 (TARGET_SSE \
&& 1 && 1)
#define HAVE_ieee_maxv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE \
&& (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_ieee_minv4sf3 (TARGET_SSE \
&& 1 && 1)
#define HAVE_ieee_minv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE \
&& (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_ieee_maxv8df3 ((TARGET_SSE \
&& 1 && 1) && (TARGET_AVX512F))
#define HAVE_ieee_maxv8df3_round ((TARGET_AVX512F) && ((TARGET_SSE \
&& 1 && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_ieee_maxv8df3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
&& (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_ieee_maxv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE \
&& (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_ieee_minv8df3 ((TARGET_SSE \
&& 1 && 1) && (TARGET_AVX512F))
#define HAVE_ieee_minv8df3_round ((TARGET_AVX512F) && ((TARGET_SSE \
&& 1 && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_ieee_minv8df3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
&& (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_ieee_minv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE \
&& (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_ieee_maxv4df3 ((TARGET_SSE \
&& 1 && 1) && (TARGET_AVX))
#define HAVE_ieee_maxv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
&& (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_ieee_minv4df3 ((TARGET_SSE \
&& 1 && 1) && (TARGET_AVX))
#define HAVE_ieee_minv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
&& (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_ieee_maxv2df3 ((TARGET_SSE \
&& 1 && 1) && (TARGET_SSE2))
#define HAVE_ieee_maxv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
&& (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
#define HAVE_ieee_minv2df3 ((TARGET_SSE \
&& 1 && 1) && (TARGET_SSE2))
#define HAVE_ieee_minv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE \
&& (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
#define HAVE_sse_vmsmaxv4sf3 (TARGET_SSE)
#define HAVE_sse_vmsmaxv4sf3_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse_vmsminv4sf3 (TARGET_SSE)
#define HAVE_sse_vmsminv4sf3_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse2_vmsmaxv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_sse2_vmsmaxv2df3_round ((TARGET_AVX512F) && ((TARGET_SSE) && (TARGET_SSE2)))
#define HAVE_sse2_vmsminv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_sse2_vmsminv2df3_round ((TARGET_AVX512F) && ((TARGET_SSE) && (TARGET_SSE2)))
#define HAVE_avx_addsubv4df3 (TARGET_AVX)
#define HAVE_sse3_addsubv2df3 (TARGET_SSE3)
#define HAVE_avx_addsubv8sf3 (TARGET_AVX)
#define HAVE_sse3_addsubv4sf3 (TARGET_SSE3)
#define HAVE_avx_haddv4df3 (TARGET_AVX)
#define HAVE_avx_hsubv4df3 (TARGET_AVX)
#define HAVE_sse3_hsubv2df3 (TARGET_SSE3)
#define HAVE_avx_haddv8sf3 (TARGET_AVX)
#define HAVE_avx_hsubv8sf3 (TARGET_AVX)
#define HAVE_sse3_haddv4sf3 (TARGET_SSE3)
#define HAVE_sse3_hsubv4sf3 (TARGET_SSE3)
#define HAVE_reducepv16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_reducepv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_reducepv4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_reducepv8df_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_reducepv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_reducepv2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_reducesv4sf (TARGET_AVX512DQ)
#define HAVE_reducesv2df ((TARGET_AVX512DQ) && (TARGET_SSE2))
#define HAVE_avx_cmpv8sf3 (TARGET_AVX)
#define HAVE_avx_cmpv4sf3 (TARGET_AVX)
#define HAVE_avx_cmpv4df3 (TARGET_AVX)
#define HAVE_avx_cmpv2df3 ((TARGET_AVX) && (TARGET_SSE2))
#define HAVE_avx_vmcmpv4sf3 (TARGET_AVX)
#define HAVE_avx_vmcmpv2df3 ((TARGET_AVX) && (TARGET_SSE2))
#define HAVE_avx_maskcmpv8sf3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sse_maskcmpv4sf3 (TARGET_SSE)
#define HAVE_avx_maskcmpv4df3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sse2_maskcmpv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_sse_vmmaskcmpv4sf3 (TARGET_SSE)
#define HAVE_sse2_vmmaskcmpv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_avx512f_cmpv16si3 (TARGET_AVX512F && 1)
#define HAVE_avx512f_cmpv16si3_mask ((TARGET_AVX512F) && (TARGET_AVX512F && 1))
#define HAVE_avx512f_cmpv16si3_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SImode == V16SFmode \
|| V16SImode == V8DFmode \
|| V16SImode == V8DImode \
|| V16SImode == V16SImode)))
#define HAVE_avx512f_cmpv16si3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SImode == V16SFmode \
|| V16SImode == V8DFmode \
|| V16SImode == V8DImode \
|| V16SImode == V16SImode))))
#define HAVE_avx512vl_cmpv8si3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv8si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_cmpv4si3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv4si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512f_cmpv8di3 (TARGET_AVX512F && 1)
#define HAVE_avx512f_cmpv8di3_mask ((TARGET_AVX512F) && (TARGET_AVX512F && 1))
#define HAVE_avx512f_cmpv8di3_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DImode == V16SFmode \
|| V8DImode == V8DFmode \
|| V8DImode == V8DImode \
|| V8DImode == V16SImode)))
#define HAVE_avx512f_cmpv8di3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DImode == V16SFmode \
|| V8DImode == V8DFmode \
|| V8DImode == V8DImode \
|| V8DImode == V16SImode))))
#define HAVE_avx512vl_cmpv4di3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv4di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_cmpv2di3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv2di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512f_cmpv16sf3 (TARGET_AVX512F && 1)
#define HAVE_avx512f_cmpv16sf3_mask ((TARGET_AVX512F) && (TARGET_AVX512F && 1))
#define HAVE_avx512f_cmpv16sf3_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)))
#define HAVE_avx512f_cmpv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode))))
#define HAVE_avx512vl_cmpv8sf3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv8sf3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_cmpv4sf3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv4sf3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512f_cmpv8df3 (TARGET_AVX512F && 1)
#define HAVE_avx512f_cmpv8df3_mask ((TARGET_AVX512F) && (TARGET_AVX512F && 1))
#define HAVE_avx512f_cmpv8df3_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_avx512f_cmpv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode))))
#define HAVE_avx512vl_cmpv4df3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv4df3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_cmpv2df3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv2df3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_cmpv64qi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_cmpv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_cmpv16qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_cmpv32qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_cmpv32hi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_cmpv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_cmpv16hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_cmpv8hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cmpv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_ucmpv64qi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_ucmpv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_ucmpv16qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ucmpv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_ucmpv32qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ucmpv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_ucmpv32hi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_ucmpv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_ucmpv16hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ucmpv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_ucmpv8hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ucmpv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512f_ucmpv16si3 (TARGET_AVX512F)
#define HAVE_avx512f_ucmpv16si3_mask (TARGET_AVX512F)
#define HAVE_avx512vl_ucmpv8si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ucmpv8si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_ucmpv4si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ucmpv4si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_ucmpv8di3 (TARGET_AVX512F)
#define HAVE_avx512f_ucmpv8di3_mask (TARGET_AVX512F)
#define HAVE_avx512vl_ucmpv4di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ucmpv4di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_ucmpv2di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ucmpv2di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_vmcmpv4sf3 (TARGET_AVX512F)
#define HAVE_avx512f_vmcmpv4sf3_round (TARGET_AVX512F)
#define HAVE_avx512f_vmcmpv2df3 ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_vmcmpv2df3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
#define HAVE_avx512f_vmcmpv4sf3_mask (TARGET_AVX512F)
#define HAVE_avx512f_vmcmpv4sf3_mask_round (TARGET_AVX512F)
#define HAVE_avx512f_vmcmpv2df3_mask ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_vmcmpv2df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
#define HAVE_avx512f_maskcmpv16sf3 (TARGET_AVX512F)
#define HAVE_avx512f_maskcmpv8sf3 ((TARGET_AVX512F) && (TARGET_AVX))
#define HAVE_avx512f_maskcmpv4sf3 (TARGET_AVX512F)
#define HAVE_avx512f_maskcmpv8df3 (TARGET_AVX512F)
#define HAVE_avx512f_maskcmpv4df3 ((TARGET_AVX512F) && (TARGET_AVX))
#define HAVE_avx512f_maskcmpv2df3 ((TARGET_AVX512F) && (TARGET_SSE2))
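/* comi/ucomi patterns: COMISS/COMISD and UCOMISS/UCOMISD scalar
   compares that set EFLAGS.  SSE_FLOAT_MODE_P tests whether scalar
   arithmetic in the given mode is done with SSE instructions on the
   current target; the `_round' forms add the AVX-512 SAE
   (suppress-all-exceptions) operand.  */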
#define HAVE_sse_comi (SSE_FLOAT_MODE_P (SFmode))
#define HAVE_sse_comi_round ((TARGET_AVX512F) && (SSE_FLOAT_MODE_P (SFmode)))
#define HAVE_sse2_comi (SSE_FLOAT_MODE_P (DFmode))
#define HAVE_sse2_comi_round ((TARGET_AVX512F) && (SSE_FLOAT_MODE_P (DFmode)))
#define HAVE_sse_ucomi (SSE_FLOAT_MODE_P (SFmode))
#define HAVE_sse_ucomi_round ((TARGET_AVX512F) && (SSE_FLOAT_MODE_P (SFmode)))
#define HAVE_sse2_ucomi (SSE_FLOAT_MODE_P (DFmode))
#define HAVE_sse2_ucomi_round ((TARGET_AVX512F) && (SSE_FLOAT_MODE_P (DFmode)))
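/* ANDNPS/ANDNPD bitwise and-not patterns, plus AVX512VL masked forms.
   The `TARGET_SSE && 1' shape is the same substitution residue as
   above: the `1' stands where the masked variant instead substitutes
   TARGET_AVX512VL.  */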
#define HAVE_avx_andnotv8sf3 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_avx_andnotv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_AVX)))
#define HAVE_sse_andnotv4sf3 (TARGET_SSE && 1)
#define HAVE_sse_andnotv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && TARGET_AVX512VL))
#define HAVE_avx_andnotv4df3 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_avx_andnotv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_AVX)))
#define HAVE_sse2_andnotv2df3 ((TARGET_SSE && 1) && (TARGET_SSE2))
#define HAVE_sse2_andnotv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_SSE2)))
#define HAVE_avx512f_andnotv16sf3 (TARGET_AVX512F)
#define HAVE_avx512f_andnotv16sf3_mask (TARGET_AVX512F)
#define HAVE_avx512f_andnotv8df3 (TARGET_AVX512F)
#define HAVE_avx512f_andnotv8df3_mask (TARGET_AVX512F)
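/* FMA patterns: fmadd/fmsub/fnmadd/fnmsub and the alternating
   fmaddsub/fmsubadd forms, each in `_maskz_1' (zero-masking
   expansion), `_mask' (merge into the first source) and `_mask3'
   (merge into the third source, the addend) variants.
   `(64 == 64 || TARGET_AVX512VL)' is the substituted vector-size
   check: full 512-bit vectors need only AVX512F, while the 128/256-bit
   forms (`16 == 64', `32 == 64') also need AVX512VL.  A typical use
   inside the compiler -- a sketch only, with placeholder operands --
   is:

     if (HAVE_avx512f_fmadd_v16sf_mask)
       emit_insn (gen_avx512f_fmadd_v16sf_mask (dest, a, b, c, mask));

   where the exact operand list is given by the corresponding gen_*
   prototype that genflags emits alongside these macros.  */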
#define HAVE_fma_fmadd_v16sf_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fmadd_v16sf_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)))
#define HAVE_fma_fmadd_v8sf_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmadd_v4sf_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmadd_v8df_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fmadd_v8df_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_fma_fmadd_v4df_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmadd_v2df_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fmadd_v16sf_mask (TARGET_AVX512F && 1)
#define HAVE_avx512f_fmadd_v16sf_mask_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)))
#define HAVE_avx512vl_fmadd_v8sf_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v4sf_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fmadd_v8df_mask (TARGET_AVX512F && 1)
#define HAVE_avx512f_fmadd_v8df_mask_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_avx512vl_fmadd_v4df_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v2df_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fmadd_v16sf_mask3 (TARGET_AVX512F)
#define HAVE_avx512f_fmadd_v16sf_mask3_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmadd_v8sf_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v8sf_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmadd_v4sf_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v4sf_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmadd_v8df_mask3 (TARGET_AVX512F)
#define HAVE_avx512f_fmadd_v8df_mask3_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmadd_v4df_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v4df_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmadd_v2df_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v2df_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_fma_fmsub_v16sf_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fmsub_v16sf_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)))
#define HAVE_fma_fmsub_v8sf_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmsub_v4sf_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmsub_v8df_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fmsub_v8df_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_fma_fmsub_v4df_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmsub_v2df_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fmsub_v16sf_mask (TARGET_AVX512F)
#define HAVE_avx512f_fmsub_v16sf_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmsub_v8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsub_v8sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmsub_v4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsub_v4sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmsub_v8df_mask (TARGET_AVX512F)
#define HAVE_avx512f_fmsub_v8df_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmsub_v4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsub_v4df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmsub_v2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsub_v2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmsub_v16sf_mask3 (TARGET_AVX512F && 1)
#define HAVE_avx512f_fmsub_v16sf_mask3_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)))
#define HAVE_avx512vl_fmsub_v8sf_mask3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsub_v4sf_mask3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fmsub_v8df_mask3 (TARGET_AVX512F && 1)
#define HAVE_avx512f_fmsub_v8df_mask3_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_avx512vl_fmsub_v4df_mask3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsub_v2df_mask3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fnmadd_v16sf_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fnmadd_v16sf_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)))
#define HAVE_fma_fnmadd_v8sf_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fnmadd_v4sf_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fnmadd_v8df_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fnmadd_v8df_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_fma_fnmadd_v4df_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fnmadd_v2df_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fnmadd_v16sf_mask (TARGET_AVX512F && 1)
#define HAVE_avx512f_fnmadd_v16sf_mask_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)))
#define HAVE_avx512vl_fnmadd_v8sf_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmadd_v4sf_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fnmadd_v8df_mask (TARGET_AVX512F && 1)
#define HAVE_avx512f_fnmadd_v8df_mask_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_avx512vl_fnmadd_v4df_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmadd_v2df_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fnmadd_v16sf_mask3 (TARGET_AVX512F && 1)
#define HAVE_avx512f_fnmadd_v16sf_mask3_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)))
#define HAVE_avx512vl_fnmadd_v8sf_mask3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmadd_v4sf_mask3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fnmadd_v8df_mask3 (TARGET_AVX512F && 1)
#define HAVE_avx512f_fnmadd_v8df_mask3_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_avx512vl_fnmadd_v4df_mask3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmadd_v2df_mask3 ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fnmsub_v16sf_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fnmsub_v16sf_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)))
#define HAVE_fma_fnmsub_v8sf_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fnmsub_v4sf_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fnmsub_v8df_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fnmsub_v8df_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_fma_fnmsub_v4df_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fnmsub_v2df_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fnmsub_v16sf_mask (TARGET_AVX512F && 1)
#define HAVE_avx512f_fnmsub_v16sf_mask_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)))
#define HAVE_avx512vl_fnmsub_v8sf_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmsub_v4sf_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fnmsub_v8df_mask (TARGET_AVX512F && 1)
#define HAVE_avx512f_fnmsub_v8df_mask_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_avx512vl_fnmsub_v4df_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmsub_v2df_mask ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fnmsub_v16sf_mask3 (TARGET_AVX512F)
#define HAVE_avx512f_fnmsub_v16sf_mask3_round (TARGET_AVX512F)
#define HAVE_avx512vl_fnmsub_v8sf_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmsub_v8sf_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fnmsub_v4sf_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmsub_v4sf_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fnmsub_v8df_mask3 (TARGET_AVX512F)
#define HAVE_avx512f_fnmsub_v8df_mask3_round (TARGET_AVX512F)
#define HAVE_avx512vl_fnmsub_v4df_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmsub_v4df_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fnmsub_v2df_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fnmsub_v2df_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_fma_fmaddsub_v16sf_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fmaddsub_v16sf_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)))
#define HAVE_fma_fmaddsub_v8sf_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmaddsub_v4sf_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmaddsub_v8df_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fmaddsub_v8df_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_fma_fmaddsub_v4df_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmaddsub_v2df_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fmaddsub_v16sf_mask (TARGET_AVX512F)
#define HAVE_avx512f_fmaddsub_v16sf_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmaddsub_v8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v8sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmaddsub_v4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v4sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmaddsub_v8df_mask (TARGET_AVX512F)
#define HAVE_avx512f_fmaddsub_v8df_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmaddsub_v4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v4df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmaddsub_v2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmaddsub_v16sf_mask3 (TARGET_AVX512F)
#define HAVE_avx512f_fmaddsub_v16sf_mask3_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmaddsub_v8sf_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v8sf_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmaddsub_v4sf_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v4sf_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmaddsub_v8df_mask3 (TARGET_AVX512F)
#define HAVE_avx512f_fmaddsub_v8df_mask3_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmaddsub_v4df_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v4df_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmaddsub_v2df_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v2df_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_fma_fmsubadd_v16sf_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fmsubadd_v16sf_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)))
#define HAVE_fma_fmsubadd_v8sf_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmsubadd_v4sf_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmsubadd_v8df_maskz_1 (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && 1)
#define HAVE_fma_fmsubadd_v8df_maskz_1_round ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_fma_fmsubadd_v4df_maskz_1 ((TARGET_AVX512F && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_fma_fmsubadd_v2df_maskz_1 ((TARGET_AVX512F && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512VL))
#define HAVE_avx512f_fmsubadd_v16sf_mask (TARGET_AVX512F)
#define HAVE_avx512f_fmsubadd_v16sf_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmsubadd_v8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsubadd_v8sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmsubadd_v4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsubadd_v4sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmsubadd_v8df_mask (TARGET_AVX512F)
#define HAVE_avx512f_fmsubadd_v8df_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmsubadd_v4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsubadd_v4df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmsubadd_v2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsubadd_v2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmsubadd_v16sf_mask3 (TARGET_AVX512F)
#define HAVE_avx512f_fmsubadd_v16sf_mask3_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmsubadd_v8sf_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsubadd_v8sf_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmsubadd_v4sf_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsubadd_v4sf_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmsubadd_v8df_mask3 (TARGET_AVX512F)
#define HAVE_avx512f_fmsubadd_v8df_mask3_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmsubadd_v4df_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsubadd_v4df_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmsubadd_v2df_mask3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmsubadd_v2df_mask3_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
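/* Conversion patterns.  The `cvt' names follow the instruction
   mnemonics (CVTPI2PS, CVTSS2SI, VCVTUSI2SS, ...); a leading `u'
   marks unsigned variants, `fix_trunc' is truncating
   float-to-integer conversion, `fix_notrunc' converts using the
   current (or embedded) rounding mode, and `float'/`ufloat' are
   signed/unsigned integer-to-float.  */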
#define HAVE_sse_cvtpi2ps (TARGET_SSE)
#define HAVE_sse_cvtps2pi (TARGET_SSE)
#define HAVE_sse_cvttps2pi (TARGET_SSE)
#define HAVE_sse_cvtsi2ss (TARGET_SSE)
#define HAVE_sse_cvtsi2ss_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse_cvtsi2ssq (TARGET_SSE && TARGET_64BIT)
#define HAVE_sse_cvtsi2ssq_round ((TARGET_AVX512F) && (TARGET_SSE && TARGET_64BIT))
#define HAVE_sse_cvtss2si (TARGET_SSE)
#define HAVE_sse_cvtss2si_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse_cvtss2si_2 (TARGET_SSE)
#define HAVE_sse_cvtss2siq (TARGET_SSE && TARGET_64BIT)
#define HAVE_sse_cvtss2siq_round ((TARGET_AVX512F) && (TARGET_SSE && TARGET_64BIT))
#define HAVE_sse_cvtss2siq_2 (TARGET_SSE && TARGET_64BIT)
#define HAVE_sse_cvttss2si (TARGET_SSE)
#define HAVE_sse_cvttss2si_round ((TARGET_AVX512F) && (TARGET_SSE))
#define HAVE_sse_cvttss2siq (TARGET_SSE && TARGET_64BIT)
#define HAVE_sse_cvttss2siq_round ((TARGET_AVX512F) && (TARGET_SSE && TARGET_64BIT))
#define HAVE_cvtusi2ss32 (TARGET_AVX512F && 1)
#define HAVE_cvtusi2ss32_round ((TARGET_AVX512F) && (TARGET_AVX512F && (V4SFmode == V4SFmode)))
#define HAVE_cvtusi2sd32 ((TARGET_AVX512F && 1) && (TARGET_SSE2))
#define HAVE_cvtusi2ss64 (TARGET_AVX512F && TARGET_64BIT)
#define HAVE_cvtusi2ss64_round ((TARGET_AVX512F) && (TARGET_AVX512F && TARGET_64BIT))
#define HAVE_cvtusi2sd64 ((TARGET_AVX512F && TARGET_64BIT) && (TARGET_SSE2))
#define HAVE_cvtusi2sd64_round ((TARGET_AVX512F) && ((TARGET_AVX512F && TARGET_64BIT) && (TARGET_SSE2)))
#define HAVE_floatv16siv16sf2 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512F))
#define HAVE_floatv16siv16sf2_round ((TARGET_AVX512F) && ((TARGET_SSE2 && 1 && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_floatv16siv16sf2_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_floatv16siv16sf2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_floatv8siv8sf2 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX))
#define HAVE_floatv8siv8sf2_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_floatv4siv4sf2 (TARGET_SSE2 && 1 && 1)
#define HAVE_floatv4siv4sf2_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_ufloatv16siv16sf2 (TARGET_AVX512F)
#define HAVE_ufloatv16siv16sf2_round (TARGET_AVX512F)
#define HAVE_ufloatv16siv16sf2_mask (TARGET_AVX512F)
#define HAVE_ufloatv16siv16sf2_mask_round (TARGET_AVX512F)
#define HAVE_ufloatv8siv8sf2 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_ufloatv8siv8sf2_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_ufloatv8siv8sf2_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_ufloatv8siv8sf2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_ufloatv4siv4sf2 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_ufloatv4siv4sf2_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_ufloatv4siv4sf2_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_ufloatv4siv4sf2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx_fix_notruncv8sfv8si ((TARGET_SSE2 && 1) && (TARGET_AVX))
#define HAVE_avx_fix_notruncv8sfv8si_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX)))
#define HAVE_sse2_fix_notruncv4sfv4si (TARGET_SSE2 && 1)
#define HAVE_sse2_fix_notruncv4sfv4si_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_avx512f_fix_notruncv16sfv16si (TARGET_AVX512F)
#define HAVE_avx512f_fix_notruncv16sfv16si_round (TARGET_AVX512F)
#define HAVE_avx512f_fix_notruncv16sfv16si_mask (TARGET_AVX512F)
#define HAVE_avx512f_fix_notruncv16sfv16si_mask_round (TARGET_AVX512F)
#define HAVE_avx512f_ufix_notruncv16sfv16si_mask (TARGET_AVX512F)
#define HAVE_avx512f_ufix_notruncv16sfv16si_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_ufix_notruncv8sfv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_ufix_notruncv8sfv8si_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512vl_ufix_notruncv4sfv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_ufix_notruncv4sfv4si_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512dq_cvtps2qqv8di_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_avx512dq_cvtps2qqv8di_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DImode == V16SFmode \
|| V8DImode == V8DFmode \
|| V8DImode == V8DImode \
|| V8DImode == V16SImode))))
#define HAVE_avx512dq_cvtps2qqv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512dq_cvtps2qqv2di_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && TARGET_AVX512VL))
#define HAVE_avx512dq_cvtps2uqqv8di_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_avx512dq_cvtps2uqqv8di_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DImode == V16SFmode \
|| V8DImode == V8DFmode \
|| V8DImode == V8DImode \
|| V8DImode == V16SImode))))
#define HAVE_avx512dq_cvtps2uqqv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512dq_cvtps2uqqv2di_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && TARGET_AVX512VL))
#define HAVE_fix_truncv16sfv16si2 (TARGET_AVX512F)
#define HAVE_fix_truncv16sfv16si2_round (TARGET_AVX512F)
#define HAVE_fix_truncv16sfv16si2_mask (TARGET_AVX512F)
#define HAVE_fix_truncv16sfv16si2_mask_round (TARGET_AVX512F)
#define HAVE_ufix_truncv16sfv16si2 (TARGET_AVX512F)
#define HAVE_ufix_truncv16sfv16si2_round (TARGET_AVX512F)
#define HAVE_ufix_truncv16sfv16si2_mask (TARGET_AVX512F)
#define HAVE_ufix_truncv16sfv16si2_mask_round (TARGET_AVX512F)
#define HAVE_fix_truncv8sfv8si2 (TARGET_AVX && 1)
#define HAVE_fix_truncv8sfv8si2_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
#define HAVE_fix_truncv4sfv4si2 (TARGET_SSE2 && 1)
#define HAVE_fix_truncv4sfv4si2_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
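/* Scalar SSE2 conversions, including the MMX-register CVTPI2PD
   family, and the AVX-512 unsigned scalar converts (VCVTSS2USI,
   VCVTTSD2USI, ...).  The 64-bit `q' variants additionally require
   TARGET_64BIT.  */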
#define HAVE_sse2_cvtpi2pd (TARGET_SSE2)
#define HAVE_sse2_cvtpd2pi (TARGET_SSE2)
#define HAVE_sse2_cvttpd2pi (TARGET_SSE2)
#define HAVE_sse2_cvtsi2sd (TARGET_SSE2)
#define HAVE_sse2_cvtsi2sdq (TARGET_SSE2 && TARGET_64BIT)
#define HAVE_sse2_cvtsi2sdq_round ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_64BIT))
#define HAVE_avx512f_vcvtss2usi (TARGET_AVX512F)
#define HAVE_avx512f_vcvtss2usi_round (TARGET_AVX512F)
#define HAVE_avx512f_vcvtss2usiq (TARGET_AVX512F && TARGET_64BIT)
#define HAVE_avx512f_vcvtss2usiq_round ((TARGET_AVX512F) && (TARGET_AVX512F && TARGET_64BIT))
#define HAVE_avx512f_vcvttss2usi (TARGET_AVX512F)
#define HAVE_avx512f_vcvttss2usi_round (TARGET_AVX512F)
#define HAVE_avx512f_vcvttss2usiq (TARGET_AVX512F && TARGET_64BIT)
#define HAVE_avx512f_vcvttss2usiq_round ((TARGET_AVX512F) && (TARGET_AVX512F && TARGET_64BIT))
#define HAVE_avx512f_vcvtsd2usi (TARGET_AVX512F)
#define HAVE_avx512f_vcvtsd2usi_round (TARGET_AVX512F)
#define HAVE_avx512f_vcvtsd2usiq (TARGET_AVX512F && TARGET_64BIT)
#define HAVE_avx512f_vcvtsd2usiq_round ((TARGET_AVX512F) && (TARGET_AVX512F && TARGET_64BIT))
#define HAVE_avx512f_vcvttsd2usi (TARGET_AVX512F)
#define HAVE_avx512f_vcvttsd2usi_round (TARGET_AVX512F)
#define HAVE_avx512f_vcvttsd2usiq (TARGET_AVX512F && TARGET_64BIT)
#define HAVE_avx512f_vcvttsd2usiq_round ((TARGET_AVX512F) && (TARGET_AVX512F && TARGET_64BIT))
#define HAVE_sse2_cvtsd2si (TARGET_SSE2)
#define HAVE_sse2_cvtsd2si_round ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_sse2_cvtsd2si_2 (TARGET_SSE2)
#define HAVE_sse2_cvtsd2siq (TARGET_SSE2 && TARGET_64BIT)
#define HAVE_sse2_cvtsd2siq_round ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_64BIT))
#define HAVE_sse2_cvtsd2siq_2 (TARGET_SSE2 && TARGET_64BIT)
#define HAVE_sse2_cvttsd2si (TARGET_SSE2)
#define HAVE_sse2_cvttsd2si_round ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_sse2_cvttsd2siq (TARGET_SSE2 && TARGET_64BIT)
#define HAVE_sse2_cvttsd2siq_round ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_64BIT))
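/* Packed conversions involving double-precision and 64-bit integer
   elements.  Converting between DF and DI element types (VCVTQQ2PD,
   VCVTTPD2QQ, ...) requires AVX512DQ, with AVX512VL added on top for
   the 128/256-bit forms.  */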
#define HAVE_floatv8siv8df2 ((TARGET_AVX && 1) && (TARGET_AVX512F))
#define HAVE_floatv8siv8df2_mask ((TARGET_AVX512F) && ((TARGET_AVX && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_floatv4siv4df2 (TARGET_AVX && 1)
#define HAVE_floatv4siv4df2_mask ((TARGET_AVX512F) && (TARGET_AVX && (32 == 64 || TARGET_AVX512VL)))
#define HAVE_floatv8div8df2 (TARGET_AVX512DQ)
#define HAVE_floatv8div8df2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_floatv8div8df2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_floatv8div8df2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ)))
#define HAVE_ufloatv8div8df2 (TARGET_AVX512DQ)
#define HAVE_ufloatv8div8df2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_ufloatv8div8df2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_ufloatv8div8df2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ)))
#define HAVE_floatv4div4df2 ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_floatv4div4df2_round ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_floatv4div4df2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_floatv4div4df2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL))))
#define HAVE_ufloatv4div4df2 ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_ufloatv4div4df2_round ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_ufloatv4div4df2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_ufloatv4div4df2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL))))
#define HAVE_floatv2div2df2 ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_floatv2div2df2_round ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_floatv2div2df2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_floatv2div2df2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL))))
#define HAVE_ufloatv2div2df2 ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_ufloatv2div2df2_round ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_ufloatv2div2df2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_ufloatv2div2df2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL))))
#define HAVE_floatv8div8sf2 (TARGET_AVX512DQ && 1)
#define HAVE_floatv8div8sf2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8SFmode == V8SFmode)))
#define HAVE_floatv8div8sf2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_floatv8div8sf2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8SFmode == V8SFmode))))
#define HAVE_ufloatv8div8sf2 (TARGET_AVX512DQ && 1)
#define HAVE_ufloatv8div8sf2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8SFmode == V8SFmode)))
#define HAVE_ufloatv8div8sf2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_ufloatv8div8sf2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8SFmode == V8SFmode))))
#define HAVE_floatv4div4sf2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_floatv4div4sf2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_ufloatv4div4sf2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_ufloatv4div4sf2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_floatv2div2sf2_mask (TARGET_AVX512DQ && TARGET_AVX512VL)
#define HAVE_ufloatv2div2sf2_mask (TARGET_AVX512DQ && TARGET_AVX512VL)
#define HAVE_ufloatv8siv8df2 (TARGET_AVX512F)
#define HAVE_ufloatv8siv8df2_mask (TARGET_AVX512F)
#define HAVE_ufloatv4siv4df2 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_ufloatv4siv4df2_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_ufloatv2siv2df2 (TARGET_AVX512VL)
#define HAVE_ufloatv2siv2df2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_cvtdq2pd512_2 (TARGET_AVX512F)
#define HAVE_avx_cvtdq2pd256_2 (TARGET_AVX)
#define HAVE_sse2_cvtdq2pd (TARGET_SSE2 && 1)
#define HAVE_sse2_cvtdq2pd_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
#define HAVE_avx512f_cvtpd2dq512 (TARGET_AVX512F)
#define HAVE_avx512f_cvtpd2dq512_round (TARGET_AVX512F)
#define HAVE_avx512f_cvtpd2dq512_mask (TARGET_AVX512F)
#define HAVE_avx512f_cvtpd2dq512_mask_round (TARGET_AVX512F)
#define HAVE_avx_cvtpd2dq256 (TARGET_AVX && 1)
#define HAVE_avx_cvtpd2dq256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
#define HAVE_sse2_cvtpd2dq (TARGET_SSE2 && 1)
#define HAVE_sse2_cvtpd2dq_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
#define HAVE_ufix_notruncv8dfv8si2 (TARGET_AVX512F)
#define HAVE_ufix_notruncv8dfv8si2_round (TARGET_AVX512F)
#define HAVE_ufix_notruncv8dfv8si2_mask (TARGET_AVX512F)
#define HAVE_ufix_notruncv8dfv8si2_mask_round (TARGET_AVX512F)
#define HAVE_ufix_notruncv4dfv4si2 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_ufix_notruncv4dfv4si2_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_ufix_notruncv4dfv4si2_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_ufix_notruncv4dfv4si2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_ufix_notruncv2dfv2si2 (TARGET_AVX512VL)
#define HAVE_ufix_notruncv2dfv2si2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_fix_truncv8dfv8si2 (TARGET_AVX512F)
#define HAVE_fix_truncv8dfv8si2_round (TARGET_AVX512F)
#define HAVE_fix_truncv8dfv8si2_mask (TARGET_AVX512F)
#define HAVE_fix_truncv8dfv8si2_mask_round (TARGET_AVX512F)
#define HAVE_ufix_truncv8dfv8si2 (TARGET_AVX512F)
#define HAVE_ufix_truncv8dfv8si2_round (TARGET_AVX512F)
#define HAVE_ufix_truncv8dfv8si2_mask (TARGET_AVX512F)
#define HAVE_ufix_truncv8dfv8si2_mask_round (TARGET_AVX512F)
#define HAVE_ufix_truncv2dfv2si2 (TARGET_AVX512VL)
#define HAVE_ufix_truncv2dfv2si2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_fix_truncv4dfv4si2 (TARGET_AVX || (TARGET_AVX512VL && TARGET_AVX512F))
#define HAVE_fix_truncv4dfv4si2_mask ((TARGET_AVX512F) && (TARGET_AVX || (TARGET_AVX512VL && TARGET_AVX512F)))
#define HAVE_ufix_truncv4dfv4si2 (TARGET_AVX512VL && TARGET_AVX512F)
#define HAVE_ufix_truncv4dfv4si2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL && TARGET_AVX512F))
#define HAVE_fix_truncv8dfv8di2 (TARGET_AVX512DQ && 1)
#define HAVE_fix_truncv8dfv8di2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_fix_truncv8dfv8di2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_fix_truncv8dfv8di2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode))))
#define HAVE_ufix_truncv8dfv8di2 (TARGET_AVX512DQ && 1)
#define HAVE_ufix_truncv8dfv8di2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_ufix_truncv8dfv8di2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_ufix_truncv8dfv8di2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode))))
#define HAVE_fix_truncv4dfv4di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_fix_truncv4dfv4di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_ufix_truncv4dfv4di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_ufix_truncv4dfv4di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_fix_truncv2dfv2di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_fix_truncv2dfv2di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_ufix_truncv2dfv2di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_ufix_truncv2dfv2di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_fix_notruncv8dfv8di2 (TARGET_AVX512DQ && 1)
#define HAVE_fix_notruncv8dfv8di2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_fix_notruncv8dfv8di2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_fix_notruncv8dfv8di2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode))))
#define HAVE_fix_notruncv4dfv4di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_fix_notruncv4dfv4di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_fix_notruncv2dfv2di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_fix_notruncv2dfv2di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_ufix_notruncv8dfv8di2 (TARGET_AVX512DQ && 1)
#define HAVE_ufix_notruncv8dfv8di2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_ufix_notruncv8dfv8di2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_ufix_notruncv8dfv8di2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode))))
#define HAVE_ufix_notruncv4dfv4di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_ufix_notruncv4dfv4di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_ufix_notruncv2dfv2di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_ufix_notruncv2dfv2di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_fix_truncv8sfv8di2 (TARGET_AVX512DQ && 1)
#define HAVE_fix_truncv8sfv8di2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8SFmode == V8SFmode)))
#define HAVE_fix_truncv8sfv8di2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_fix_truncv8sfv8di2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8SFmode == V8SFmode))))
#define HAVE_ufix_truncv8sfv8di2 (TARGET_AVX512DQ && 1)
#define HAVE_ufix_truncv8sfv8di2_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8SFmode == V8SFmode)))
#define HAVE_ufix_truncv8sfv8di2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_ufix_truncv8sfv8di2_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8SFmode == V8SFmode))))
#define HAVE_fix_truncv4sfv4di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_fix_truncv4sfv4di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_ufix_truncv4sfv4di2 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_ufix_truncv4sfv4di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_fix_truncv2sfv2di2 (TARGET_AVX512DQ && TARGET_AVX512VL)
#define HAVE_fix_truncv2sfv2di2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && TARGET_AVX512VL))
#define HAVE_ufix_truncv2sfv2di2 (TARGET_AVX512DQ && TARGET_AVX512VL)
#define HAVE_ufix_truncv2sfv2di2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && TARGET_AVX512VL))
#define HAVE_ufix_truncv8sfv8si2 (TARGET_AVX512VL)
#define HAVE_ufix_truncv8sfv8si2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_ufix_truncv4sfv4si2 (TARGET_AVX512VL)
#define HAVE_ufix_truncv4sfv4si2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_sse2_cvttpd2dq (TARGET_SSE2 && 1)
#define HAVE_sse2_cvttpd2dq_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
#define HAVE_sse2_cvtsd2ss (TARGET_SSE2)
#define HAVE_sse2_cvtsd2ss_round ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_sse2_cvtss2sd (TARGET_SSE2)
#define HAVE_sse2_cvtss2sd_round ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_cvtpd2ps512_mask (TARGET_AVX512F)
#define HAVE_avx512f_cvtpd2ps512_mask_round (TARGET_AVX512F)
#define HAVE_avx_cvtpd2ps256 (TARGET_AVX && 1)
#define HAVE_avx_cvtpd2ps256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
#define HAVE_avx512f_cvtps2pd512 ((TARGET_AVX && 1 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_cvtps2pd512_round ((TARGET_AVX512F) && ((TARGET_AVX && 1 && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_avx512f_cvtps2pd512_mask ((TARGET_AVX512F) && ((TARGET_AVX && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_avx512f_cvtps2pd512_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_avx_cvtps2pd256 (TARGET_AVX && 1 && 1)
#define HAVE_avx_cvtps2pd256_mask ((TARGET_AVX512F) && (TARGET_AVX && (32 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_vec_unpacks_lo_v16sf (TARGET_AVX512F)
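/* Vector-to-mask conversions (VPMOVB2M/VPMOVW2M under AVX512BW,
   VPMOVD2M/VPMOVQ2M under AVX512DQ): the sign bit of each element
   becomes one bit of a mask register.  */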
#define HAVE_avx512bw_cvtb2maskv64qi (TARGET_AVX512BW)
#define HAVE_avx512vl_cvtb2maskv16qi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cvtb2maskv32qi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_cvtw2maskv32hi (TARGET_AVX512BW)
#define HAVE_avx512vl_cvtw2maskv16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cvtw2maskv8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512f_cvtd2maskv16si (TARGET_AVX512DQ)
#define HAVE_avx512vl_cvtd2maskv8si ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cvtd2maskv4si ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_avx512f_cvtq2maskv8di (TARGET_AVX512DQ)
#define HAVE_avx512vl_cvtq2maskv4di ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cvtq2maskv2di ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_sse2_cvtps2pd (TARGET_SSE2 && 1)
#define HAVE_sse2_cvtps2pd_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
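/* Shuffle, unpack and move patterns (MOVHLPS, MOVLHPS, UNPCK[HL]PS,
   MOVSHDUP, MOVSLDUP, ...).  Conditions may inspect the operand
   vector at match time: e.g. sse_movhlps refuses two memory operands,
   and sse_movlhps defers to ix86_binary_operator_ok.  */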
#define HAVE_sse_movhlps (TARGET_SSE && !(MEM_P (operands[1]) && MEM_P (operands[2])))
#define HAVE_sse_movlhps (TARGET_SSE && ix86_binary_operator_ok (UNKNOWN, V4SFmode, operands))
#define HAVE_avx512f_unpckhps512_mask (TARGET_AVX512F)
#define HAVE_avx_unpckhps256 (TARGET_AVX && 1)
#define HAVE_avx_unpckhps256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
#define HAVE_vec_interleave_highv4sf (TARGET_SSE && 1)
#define HAVE_vec_interleave_highv4sf_mask ((TARGET_AVX512F) && (TARGET_SSE && TARGET_AVX512VL))
#define HAVE_avx512f_unpcklps512_mask (TARGET_AVX512F)
#define HAVE_avx_unpcklps256 (TARGET_AVX && 1)
#define HAVE_avx_unpcklps256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
#define HAVE_unpcklps128_mask (TARGET_AVX512VL)
#define HAVE_vec_interleave_lowv4sf (TARGET_SSE)
#define HAVE_avx_movshdup256 (TARGET_AVX && 1)
#define HAVE_avx_movshdup256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
#define HAVE_sse3_movshdup (TARGET_SSE3 && 1)
#define HAVE_sse3_movshdup_mask ((TARGET_AVX512F) && (TARGET_SSE3 && TARGET_AVX512VL))
#define HAVE_avx512f_movshdup512_mask (TARGET_AVX512F)
#define HAVE_avx_movsldup256 (TARGET_AVX && 1)
#define HAVE_avx_movsldup256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
#define HAVE_sse3_movsldup (TARGET_SSE3 && 1)
#define HAVE_sse3_movsldup_mask ((TARGET_AVX512F) && (TARGET_SSE3 && TARGET_AVX512VL))
#define HAVE_avx512f_movsldup512_mask (TARGET_AVX512F)
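/* avx_shufps256_1 exposes the 256-bit SHUFPS as eight individual
   selector operands; its condition requires the high-lane selectors
   (operands 7-10) to equal the low-lane ones (operands 3-6) plus 4,
   i.e. the same shuffle applied in both 128-bit lanes.  */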
#define HAVE_avx_shufps256_1 (TARGET_AVX \
&& 1 \
&& (INTVAL (operands[3]) == (INTVAL (operands[7]) - 4) \
&& INTVAL (operands[4]) == (INTVAL (operands[8]) - 4) \
&& INTVAL (operands[5]) == (INTVAL (operands[9]) - 4) \
&& INTVAL (operands[6]) == (INTVAL (operands[10]) - 4)))
#define HAVE_avx_shufps256_1_mask ((TARGET_AVX512F) && (TARGET_AVX \
&& TARGET_AVX512VL \
&& (INTVAL (operands[3]) == (INTVAL (operands[7]) - 4) \
&& INTVAL (operands[4]) == (INTVAL (operands[8]) - 4) \
&& INTVAL (operands[5]) == (INTVAL (operands[9]) - 4) \
&& INTVAL (operands[6]) == (INTVAL (operands[10]) - 4))))
#define HAVE_sse_shufps_v4sf_mask (TARGET_AVX512VL)
#define HAVE_sse_shufps_v4si (TARGET_SSE)
#define HAVE_sse_shufps_v4sf (TARGET_SSE)
#define HAVE_sse_storehps (TARGET_SSE)
#define HAVE_sse_loadhps (TARGET_SSE)
#define HAVE_sse_storelps (TARGET_SSE)
#define HAVE_sse_loadlps (TARGET_SSE)
#define HAVE_sse_movss (TARGET_SSE)
#define HAVE_avx2_vec_dupv8sf ((TARGET_AVX2) && (TARGET_AVX))
#define HAVE_avx2_vec_dupv4sf (TARGET_AVX2)
#define HAVE_avx2_vec_dupv8sf_1 (TARGET_AVX2)
#define HAVE_avx512f_vec_dupv16sf_1 (TARGET_AVX512F)
#define HAVE_avx512f_vec_dupv8df_1 (TARGET_AVX512F)
#define HAVE_vec_setv4si_0 (TARGET_SSE)
#define HAVE_vec_setv4sf_0 (TARGET_SSE)
#define HAVE_sse4_1_insertps (TARGET_SSE4_1)
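/* VEXTRACT patterns.  The `_maskm' forms are merge-masked extracts to
   memory, hence the rtx_equal_p requirement that the merge source be
   the destination itself; the INTVAL conditions require a run of
   consecutive element indices starting at a 128-bit boundary, so the
   extract is really a whole-lane VEXTRACTF32X4/VEXTRACTF64X2.  */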
#define HAVE_avx512dq_vextractf64x2_1_maskm (TARGET_AVX512DQ \
&& (INTVAL (operands[2]) % 2 == 0) \
&& (INTVAL (operands[2]) == INTVAL (operands[3]) - 1) \
&& rtx_equal_p (operands[4], operands[0]))
#define HAVE_avx512dq_vextracti64x2_1_maskm (TARGET_AVX512DQ \
&& (INTVAL (operands[2]) % 2 == 0) \
&& (INTVAL (operands[2]) == INTVAL (operands[3]) - 1) \
&& rtx_equal_p (operands[4], operands[0]))
#define HAVE_avx512f_vextractf32x4_1_maskm (TARGET_AVX512F \
&& ((INTVAL (operands[2]) % 4 == 0) \
&& INTVAL (operands[2]) == (INTVAL (operands[3]) - 1) \
&& INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
&& INTVAL (operands[4]) == (INTVAL (operands[5]) - 1)) \
&& rtx_equal_p (operands[6], operands[0]))
#define HAVE_avx512f_vextracti32x4_1_maskm (TARGET_AVX512F \
&& ((INTVAL (operands[2]) % 4 == 0) \
&& INTVAL (operands[2]) == (INTVAL (operands[3]) - 1) \
&& INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
&& INTVAL (operands[4]) == (INTVAL (operands[5]) - 1)) \
&& rtx_equal_p (operands[6], operands[0]))
#define HAVE_avx512dq_vextractf64x2_1_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && (INTVAL (operands[2]) == INTVAL (operands[3]) - 1)))
#define HAVE_avx512dq_vextracti64x2_1_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && (INTVAL (operands[2]) == INTVAL (operands[3]) - 1)))
#define HAVE_avx512f_vextractf32x4_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
&& (INTVAL (operands[2]) == (INTVAL (operands[3]) - 1) \
&& INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
&& INTVAL (operands[4]) == (INTVAL (operands[5]) - 1))))
#define HAVE_avx512f_vextracti32x4_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
&& (INTVAL (operands[2]) == (INTVAL (operands[3]) - 1) \
&& INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
&& INTVAL (operands[4]) == (INTVAL (operands[5]) - 1))))
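/* vec_extract_lo/hi extract the low or high half of a wide vector.
   The `(false || !(MEM_P ...))' and `(true || ...)' shapes are again
   substitution residue: a masking attribute expands to false in the
   unmasked pattern (leaving the real no-two-memory-operands check)
   and to true in the masked one (making that check vacuous).  */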
#define HAVE_vec_extract_lo_v8df_maskm (TARGET_AVX512F \
&& rtx_equal_p (operands[2], operands[0]))
#define HAVE_vec_extract_lo_v8di_maskm (TARGET_AVX512F \
&& rtx_equal_p (operands[2], operands[0]))
#define HAVE_vec_extract_lo_v8df (TARGET_AVX512F \
&& (false || !(MEM_P (operands[0]) && MEM_P (operands[1]))))
#define HAVE_vec_extract_lo_v8df_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
&& (true || !(MEM_P (operands[0]) && MEM_P (operands[1])))))
#define HAVE_vec_extract_lo_v8di (TARGET_AVX512F \
&& (false || !(MEM_P (operands[0]) && MEM_P (operands[1]))))
#define HAVE_vec_extract_lo_v8di_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
&& (true || !(MEM_P (operands[0]) && MEM_P (operands[1])))))
#define HAVE_vec_extract_hi_v8df_maskm (TARGET_AVX512F \
&& rtx_equal_p (operands[2], operands[0]))
#define HAVE_vec_extract_hi_v8di_maskm (TARGET_AVX512F \
&& rtx_equal_p (operands[2], operands[0]))
#define HAVE_vec_extract_hi_v8df (TARGET_AVX512F)
#define HAVE_vec_extract_hi_v8df_mask (TARGET_AVX512F)
#define HAVE_vec_extract_hi_v8di (TARGET_AVX512F)
#define HAVE_vec_extract_hi_v8di_mask (TARGET_AVX512F)
#define HAVE_vec_extract_hi_v16sf_maskm (TARGET_AVX512DQ \
&& rtx_equal_p (operands[2], operands[0]))
#define HAVE_vec_extract_hi_v16si_maskm (TARGET_AVX512DQ \
&& rtx_equal_p (operands[2], operands[0]))
#define HAVE_vec_extract_hi_v16sf (TARGET_AVX512F && 1)
#define HAVE_vec_extract_hi_v16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512F && TARGET_AVX512DQ))
#define HAVE_vec_extract_hi_v16si (TARGET_AVX512F && 1)
#define HAVE_vec_extract_hi_v16si_mask ((TARGET_AVX512F) && (TARGET_AVX512F && TARGET_AVX512DQ))
#define HAVE_vec_extract_lo_v16sf (TARGET_AVX512F \
&& 1 \
&& (false || !(MEM_P (operands[0]) && MEM_P (operands[1]))))
#define HAVE_vec_extract_lo_v16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
&& (64 == 64 || TARGET_AVX512VL) \
&& (true || !(MEM_P (operands[0]) && MEM_P (operands[1])))))
#define HAVE_vec_extract_lo_v16si (TARGET_AVX512F \
&& 1 \
&& (false || !(MEM_P (operands[0]) && MEM_P (operands[1]))))
#define HAVE_vec_extract_lo_v16si_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
&& (64 == 64 || TARGET_AVX512VL) \
&& (true || !(MEM_P (operands[0]) && MEM_P (operands[1])))))
#define HAVE_vec_extract_lo_v4di (TARGET_AVX \
&& 1 && 1 \
&& (false || !(MEM_P (operands[0]) && MEM_P (operands[1]))))
#define HAVE_vec_extract_lo_v4di_mask ((TARGET_AVX512F) && (TARGET_AVX \
&& TARGET_AVX512VL && TARGET_AVX512DQ \
&& (true || !(MEM_P (operands[0]) && MEM_P (operands[1])))))
#define HAVE_vec_extract_lo_v4df (TARGET_AVX \
&& 1 && 1 \
&& (false || !(MEM_P (operands[0]) && MEM_P (operands[1]))))
#define HAVE_vec_extract_lo_v4df_mask ((TARGET_AVX512F) && (TARGET_AVX \
&& TARGET_AVX512VL && TARGET_AVX512DQ \
&& (true || !(MEM_P (operands[0]) && MEM_P (operands[1])))))
#define HAVE_vec_extract_hi_v4di (TARGET_AVX && 1 && 1)
#define HAVE_vec_extract_hi_v4di_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL && TARGET_AVX512DQ))
#define HAVE_vec_extract_hi_v4df (TARGET_AVX && 1 && 1)
#define HAVE_vec_extract_hi_v4df_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL && TARGET_AVX512DQ))
#define HAVE_vec_extract_lo_v8si (TARGET_AVX \
&& 1 && 1 \
&& (false || !(MEM_P (operands[0]) && MEM_P (operands[1]))))
#define HAVE_vec_extract_lo_v8si_mask ((TARGET_AVX512F) && (TARGET_AVX \
&& TARGET_AVX512VL && TARGET_AVX512DQ \
&& (true || !(MEM_P (operands[0]) && MEM_P (operands[1])))))
#define HAVE_vec_extract_lo_v8sf (TARGET_AVX \
&& 1 && 1 \
&& (false || !(MEM_P (operands[0]) && MEM_P (operands[1]))))
#define HAVE_vec_extract_lo_v8sf_mask ((TARGET_AVX512F) && (TARGET_AVX \
&& TARGET_AVX512VL && TARGET_AVX512DQ \
&& (true || !(MEM_P (operands[0]) && MEM_P (operands[1])))))
#define HAVE_vec_extract_lo_v8si_maskm (TARGET_AVX512VL && TARGET_AVX512F \
&& rtx_equal_p (operands[2], operands[0]))
#define HAVE_vec_extract_lo_v8sf_maskm (TARGET_AVX512VL && TARGET_AVX512F \
&& rtx_equal_p (operands[2], operands[0]))
#define HAVE_vec_extract_hi_v8si_maskm (TARGET_AVX512F && TARGET_AVX512VL \
&& rtx_equal_p (operands[2], operands[0]))
#define HAVE_vec_extract_hi_v8sf_maskm (TARGET_AVX512F && TARGET_AVX512VL \
&& rtx_equal_p (operands[2], operands[0]))
#define HAVE_vec_extract_hi_v8si_mask (TARGET_AVX512VL)
#define HAVE_vec_extract_hi_v8sf_mask (TARGET_AVX512VL)
#define HAVE_vec_extract_hi_v8si (TARGET_AVX)
#define HAVE_vec_extract_hi_v8sf (TARGET_AVX)
#define HAVE_vec_extract_lo_v32hi (TARGET_AVX512F && !(MEM_P (operands[0]) && MEM_P (operands[1])))
#define HAVE_vec_extract_hi_v32hi (TARGET_AVX512F)
#define HAVE_vec_extract_lo_v16hi (TARGET_AVX && !(MEM_P (operands[0]) && MEM_P (operands[1])))
#define HAVE_vec_extract_hi_v16hi (TARGET_AVX)
#define HAVE_vec_extract_lo_v64qi (TARGET_AVX512F && !(MEM_P (operands[0]) && MEM_P (operands[1])))
#define HAVE_vec_extract_hi_v64qi (TARGET_AVX512F)
#define HAVE_vec_extract_lo_v32qi (TARGET_AVX && !(MEM_P (operands[0]) && MEM_P (operands[1])))
#define HAVE_vec_extract_hi_v32qi (TARGET_AVX)
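/* Double-precision UNPCK[HL]PD patterns, including the AVX512VL
   masked 128-bit forms.  */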
#define HAVE_avx512f_unpckhpd512_mask (TARGET_AVX512F)
#define HAVE_avx_unpckhpd256 (TARGET_AVX && 1)
#define HAVE_avx_unpckhpd256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
#define HAVE_avx512vl_unpckhpd128_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_unpcklpd128_mask (TARGET_AVX512VL)
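/* VSCALEF patterns: floating-point scaling by powers of two,
   dst = a * 2^floor(b).  The `vm' prefix names the scalar variants
   that operate on the low element and pass the upper elements
   through.  */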
#define HAVE_avx512f_vmscalefv4sf (TARGET_AVX512F)
#define HAVE_avx512f_vmscalefv4sf_round (TARGET_AVX512F)
#define HAVE_avx512f_vmscalefv2df ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_vmscalefv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
#define HAVE_avx512f_scalefv16sf (TARGET_AVX512F)
#define HAVE_avx512f_scalefv16sf_round (TARGET_AVX512F)
#define HAVE_avx512f_scalefv16sf_mask (TARGET_AVX512F)
#define HAVE_avx512f_scalefv16sf_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_scalefv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scalefv8sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_scalefv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_scalefv8sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512vl_scalefv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scalefv4sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_scalefv4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_scalefv4sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512f_scalefv8df (TARGET_AVX512F)
#define HAVE_avx512f_scalefv8df_round (TARGET_AVX512F)
#define HAVE_avx512f_scalefv8df_mask (TARGET_AVX512F)
#define HAVE_avx512f_scalefv8df_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_scalefv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scalefv4df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_scalefv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_scalefv4df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512vl_scalefv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scalefv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_scalefv2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_scalefv2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
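/* VPTERNLOG patterns: bitwise ternary logic controlled by an 8-bit
   truth-table immediate; bit (a*4 + b*2 + c) of the immediate gives
   the result for source bits a, b, c.  For instance, immediate 0xCA
   encodes the bitwise select a ? b : c.  The `_maskz_1' entries are
   the zero-masked expansion forms.  */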
#define HAVE_avx512f_vternlogv16si (TARGET_AVX512F)
#define HAVE_avx512f_vternlogv16si_maskz_1 (TARGET_AVX512F)
#define HAVE_avx512vl_vternlogv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vternlogv8si_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vternlogv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vternlogv4si_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_vternlogv8di (TARGET_AVX512F)
#define HAVE_avx512f_vternlogv8di_maskz_1 (TARGET_AVX512F)
#define HAVE_avx512vl_vternlogv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vternlogv4di_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vternlogv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vternlogv2di_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_vternlogv16si_mask (TARGET_AVX512F)
#define HAVE_avx512vl_vternlogv8si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vternlogv4si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_vternlogv8di_mask (TARGET_AVX512F)
#define HAVE_avx512vl_vternlogv4di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vternlogv2di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
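/* VGETEXP patterns: extract the exponent of each element as a
   floating-point value (roughly floor(log2 |x|)); `sgetexp' is the
   scalar form operating on the low element.  */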
#define HAVE_avx512f_getexpv16sf (TARGET_AVX512F)
#define HAVE_avx512f_getexpv16sf_round (TARGET_AVX512F)
#define HAVE_avx512f_getexpv16sf_mask (TARGET_AVX512F)
#define HAVE_avx512f_getexpv16sf_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_getexpv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_getexpv8sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_getexpv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_getexpv8sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512vl_getexpv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_getexpv4sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_getexpv4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_getexpv4sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512f_getexpv8df (TARGET_AVX512F)
#define HAVE_avx512f_getexpv8df_round (TARGET_AVX512F)
#define HAVE_avx512f_getexpv8df_mask (TARGET_AVX512F)
#define HAVE_avx512f_getexpv8df_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_getexpv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_getexpv4df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_getexpv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_getexpv4df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512vl_getexpv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_getexpv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_getexpv2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_getexpv2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512f_sgetexpv4sf (TARGET_AVX512F)
#define HAVE_avx512f_sgetexpv4sf_round (TARGET_AVX512F)
#define HAVE_avx512f_sgetexpv2df ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_sgetexpv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
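/* valign{d,q} concatenate-and-extract (element alignment) patterns;
   masked forms only.  */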
#define HAVE_avx512f_alignv16si_mask (TARGET_AVX512F)
#define HAVE_avx512vl_alignv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_alignv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_alignv8di_mask (TARGET_AVX512F)
#define HAVE_avx512vl_alignv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_alignv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
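/* vfixupimm{ps,pd} special-value fix-up patterns with _maskz_1 and
   _mask variants; the sfixupimm entries are the scalar ss/sd forms.  */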
#define HAVE_avx512f_fixupimmv16sf (TARGET_AVX512F)
#define HAVE_avx512f_fixupimmv16sf_round (TARGET_AVX512F)
#define HAVE_avx512f_fixupimmv16sf_maskz_1 (TARGET_AVX512F)
#define HAVE_avx512f_fixupimmv16sf_maskz_1_round (TARGET_AVX512F)
#define HAVE_avx512vl_fixupimmv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv8sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fixupimmv8sf_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv8sf_maskz_1_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fixupimmv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv4sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fixupimmv4sf_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv4sf_maskz_1_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fixupimmv8df (TARGET_AVX512F)
#define HAVE_avx512f_fixupimmv8df_round (TARGET_AVX512F)
#define HAVE_avx512f_fixupimmv8df_maskz_1 (TARGET_AVX512F)
#define HAVE_avx512f_fixupimmv8df_maskz_1_round (TARGET_AVX512F)
#define HAVE_avx512vl_fixupimmv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv4df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fixupimmv4df_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv4df_maskz_1_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fixupimmv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fixupimmv2df_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv2df_maskz_1_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fixupimmv16sf_mask (TARGET_AVX512F)
#define HAVE_avx512f_fixupimmv16sf_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_fixupimmv8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv8sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fixupimmv4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv4sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fixupimmv8df_mask (TARGET_AVX512F)
#define HAVE_avx512f_fixupimmv8df_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_fixupimmv4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv4df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fixupimmv2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_sfixupimmv4sf (TARGET_AVX512F)
#define HAVE_avx512f_sfixupimmv4sf_round (TARGET_AVX512F)
#define HAVE_avx512f_sfixupimmv4sf_maskz_1 (TARGET_AVX512F)
#define HAVE_avx512f_sfixupimmv4sf_maskz_1_round (TARGET_AVX512F)
#define HAVE_avx512f_sfixupimmv2df ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_sfixupimmv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
#define HAVE_avx512f_sfixupimmv2df_maskz_1 ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_sfixupimmv2df_maskz_1_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
#define HAVE_avx512f_sfixupimmv4sf_mask (TARGET_AVX512F)
#define HAVE_avx512f_sfixupimmv4sf_mask_round (TARGET_AVX512F)
#define HAVE_avx512f_sfixupimmv2df_mask ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_sfixupimmv2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
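/* vrndscale{ps,pd} round-to-given-precision patterns; the final
   rndscalev4sf/v2df pair apparently backs the scalar vrndscale{ss,sd}
   forms.  */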
#define HAVE_avx512f_rndscalev16sf (TARGET_AVX512F)
#define HAVE_avx512f_rndscalev16sf_round (TARGET_AVX512F)
#define HAVE_avx512f_rndscalev16sf_mask (TARGET_AVX512F)
#define HAVE_avx512f_rndscalev16sf_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_rndscalev8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rndscalev8sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rndscalev8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rndscalev8sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512vl_rndscalev4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rndscalev4sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rndscalev4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rndscalev4sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512f_rndscalev8df (TARGET_AVX512F)
#define HAVE_avx512f_rndscalev8df_round (TARGET_AVX512F)
#define HAVE_avx512f_rndscalev8df_mask (TARGET_AVX512F)
#define HAVE_avx512f_rndscalev8df_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_rndscalev4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rndscalev4df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rndscalev4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rndscalev4df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512vl_rndscalev2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rndscalev2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rndscalev2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rndscalev2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512f_rndscalev4sf (TARGET_AVX512F)
#define HAVE_avx512f_rndscalev4sf_round (TARGET_AVX512F)
#define HAVE_avx512f_rndscalev2df ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_rndscalev2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
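/* vshufps / vshufpd shuffles plus DImode/DFmode interleave and
   scalar-move helpers.  The long INTVAL chains check that the single
   8-bit immediate selector repeats across every 128-bit lane, as the
   512-bit instruction requires.  */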
#define HAVE_avx512f_shufps512_1 (TARGET_AVX512F \
&& (INTVAL (operands[3]) == (INTVAL (operands[7]) - 4) \
&& INTVAL (operands[4]) == (INTVAL (operands[8]) - 4) \
&& INTVAL (operands[5]) == (INTVAL (operands[9]) - 4) \
&& INTVAL (operands[6]) == (INTVAL (operands[10]) - 4) \
&& INTVAL (operands[3]) == (INTVAL (operands[11]) - 8) \
&& INTVAL (operands[4]) == (INTVAL (operands[12]) - 8) \
&& INTVAL (operands[5]) == (INTVAL (operands[13]) - 8) \
&& INTVAL (operands[6]) == (INTVAL (operands[14]) - 8) \
&& INTVAL (operands[3]) == (INTVAL (operands[15]) - 12) \
&& INTVAL (operands[4]) == (INTVAL (operands[16]) - 12) \
&& INTVAL (operands[5]) == (INTVAL (operands[17]) - 12) \
&& INTVAL (operands[6]) == (INTVAL (operands[18]) - 12)))
#define HAVE_avx512f_shufps512_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
&& (INTVAL (operands[3]) == (INTVAL (operands[7]) - 4) \
&& INTVAL (operands[4]) == (INTVAL (operands[8]) - 4) \
&& INTVAL (operands[5]) == (INTVAL (operands[9]) - 4) \
&& INTVAL (operands[6]) == (INTVAL (operands[10]) - 4) \
&& INTVAL (operands[3]) == (INTVAL (operands[11]) - 8) \
&& INTVAL (operands[4]) == (INTVAL (operands[12]) - 8) \
&& INTVAL (operands[5]) == (INTVAL (operands[13]) - 8) \
&& INTVAL (operands[6]) == (INTVAL (operands[14]) - 8) \
&& INTVAL (operands[3]) == (INTVAL (operands[15]) - 12) \
&& INTVAL (operands[4]) == (INTVAL (operands[16]) - 12) \
&& INTVAL (operands[5]) == (INTVAL (operands[17]) - 12) \
&& INTVAL (operands[6]) == (INTVAL (operands[18]) - 12))))
#define HAVE_avx512f_shufpd512_1 (TARGET_AVX512F)
#define HAVE_avx512f_shufpd512_1_mask (TARGET_AVX512F)
#define HAVE_avx_shufpd256_1 (TARGET_AVX && 1)
#define HAVE_avx_shufpd256_1_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
#define HAVE_sse2_shufpd_v2df_mask (TARGET_AVX512VL)
#define HAVE_avx2_interleave_highv4di (TARGET_AVX2 && 1)
#define HAVE_avx2_interleave_highv4di_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_avx512f_interleave_highv8di_mask (TARGET_AVX512F)
#define HAVE_vec_interleave_highv2di (TARGET_SSE2 && 1)
#define HAVE_vec_interleave_highv2di_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
#define HAVE_avx2_interleave_lowv4di (TARGET_AVX2 && 1)
#define HAVE_avx2_interleave_lowv4di_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_avx512f_interleave_lowv8di_mask (TARGET_AVX512F)
#define HAVE_vec_interleave_lowv2di (TARGET_SSE2 && 1)
#define HAVE_vec_interleave_lowv2di_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
#define HAVE_sse2_shufpd_v2di (TARGET_SSE2)
#define HAVE_sse2_shufpd_v2df (TARGET_SSE2)
#define HAVE_sse2_storehpd (TARGET_SSE2 && !(MEM_P (operands[0]) && MEM_P (operands[1])))
#define HAVE_sse2_storelpd (TARGET_SSE2 && !(MEM_P (operands[0]) && MEM_P (operands[1])))
#define HAVE_sse2_loadhpd (TARGET_SSE2 && !(MEM_P (operands[1]) && MEM_P (operands[2])))
#define HAVE_sse2_loadlpd (TARGET_SSE2 && !(MEM_P (operands[1]) && MEM_P (operands[2])))
#define HAVE_sse2_movsd (TARGET_SSE2)
#define HAVE_vec_dupv2df (TARGET_SSE2 && 1)
#define HAVE_vec_dupv2df_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
#define HAVE_vec_concatv2df (TARGET_SSE \
&& (!(MEM_P (operands[1]) && MEM_P (operands[2])) \
|| (TARGET_SSE3 && rtx_equal_p (operands[1], operands[2]))))
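/* vpmov down-conversions: plain truncation, ss_ = signed saturation
   (vpmovs), us_ = unsigned saturation (vpmovus); the _mask_store forms
   write the narrowed result to memory under mask.  */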
#define HAVE_avx512f_ss_truncatev16siv16qi2_mask (TARGET_AVX512F)
#define HAVE_avx512f_truncatev16siv16qi2_mask (TARGET_AVX512F)
#define HAVE_avx512f_us_truncatev16siv16qi2_mask (TARGET_AVX512F)
#define HAVE_avx512f_ss_truncatev16siv16hi2_mask (TARGET_AVX512F)
#define HAVE_avx512f_truncatev16siv16hi2_mask (TARGET_AVX512F)
#define HAVE_avx512f_us_truncatev16siv16hi2_mask (TARGET_AVX512F)
#define HAVE_avx512f_ss_truncatev8div8si2_mask (TARGET_AVX512F)
#define HAVE_avx512f_truncatev8div8si2_mask (TARGET_AVX512F)
#define HAVE_avx512f_us_truncatev8div8si2_mask (TARGET_AVX512F)
#define HAVE_avx512f_ss_truncatev8div8hi2_mask (TARGET_AVX512F)
#define HAVE_avx512f_truncatev8div8hi2_mask (TARGET_AVX512F)
#define HAVE_avx512f_us_truncatev8div8hi2_mask (TARGET_AVX512F)
#define HAVE_avx512bw_ss_truncatev32hiv32qi2 (TARGET_AVX512BW)
#define HAVE_avx512bw_truncatev32hiv32qi2 (TARGET_AVX512BW)
#define HAVE_avx512bw_us_truncatev32hiv32qi2 (TARGET_AVX512BW)
#define HAVE_avx512bw_ss_truncatev32hiv32qi2_mask (TARGET_AVX512BW)
#define HAVE_avx512bw_truncatev32hiv32qi2_mask (TARGET_AVX512BW)
#define HAVE_avx512bw_us_truncatev32hiv32qi2_mask (TARGET_AVX512BW)
#define HAVE_avx512vl_ss_truncatev4div4si2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev4div4si2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev4div4si2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev8siv8hi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev8siv8hi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev8siv8hi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev16hiv16qi2_mask ((TARGET_AVX512VL) && (TARGET_AVX512BW))
#define HAVE_avx512vl_truncatev16hiv16qi2_mask ((TARGET_AVX512VL) && (TARGET_AVX512BW))
#define HAVE_avx512vl_us_truncatev16hiv16qi2_mask ((TARGET_AVX512VL) && (TARGET_AVX512BW))
#define HAVE_avx512vl_ss_truncatev2div2qi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev2div2qi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev2div2qi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev2div2qi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev2div2qi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev2div2qi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev4siv4qi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev4siv4qi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev4siv4qi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev4div4qi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev4div4qi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev4div4qi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev4siv4qi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev4siv4qi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev4siv4qi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev4div4qi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev4div4qi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev4div4qi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev8hiv8qi2_mask ((TARGET_AVX512VL) && (TARGET_AVX512BW))
#define HAVE_avx512vl_truncatev8hiv8qi2_mask ((TARGET_AVX512VL) && (TARGET_AVX512BW))
#define HAVE_avx512vl_us_truncatev8hiv8qi2_mask ((TARGET_AVX512VL) && (TARGET_AVX512BW))
#define HAVE_avx512vl_ss_truncatev8siv8qi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev8siv8qi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev8siv8qi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev8hiv8qi2_mask_store ((TARGET_AVX512VL) && (TARGET_AVX512BW))
#define HAVE_avx512vl_truncatev8hiv8qi2_mask_store ((TARGET_AVX512VL) && (TARGET_AVX512BW))
#define HAVE_avx512vl_us_truncatev8hiv8qi2_mask_store ((TARGET_AVX512VL) && (TARGET_AVX512BW))
#define HAVE_avx512vl_ss_truncatev8siv8qi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev8siv8qi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev8siv8qi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev4siv4hi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev4siv4hi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev4siv4hi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev4div4hi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev4div4hi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev4div4hi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev4siv4hi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev4siv4hi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev4siv4hi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev4div4hi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev4div4hi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev4div4hi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev2div2hi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev2div2hi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev2div2hi2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev2div2hi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev2div2hi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev2div2hi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev2div2si2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev2div2si2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev2div2si2_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev2div2si2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev2div2si2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev2div2si2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512f_ss_truncatev8div16qi2_mask (TARGET_AVX512F)
#define HAVE_avx512f_truncatev8div16qi2_mask (TARGET_AVX512F)
#define HAVE_avx512f_us_truncatev8div16qi2_mask (TARGET_AVX512F)
#define HAVE_avx512f_ss_truncatev8div16qi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_truncatev8div16qi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_us_truncatev8div16qi2_mask_store (TARGET_AVX512F)
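/* vpmaddwd multiply-accumulate and the AVX512DQ vpmullq full 64-bit
   element multiplies.  */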
#define HAVE_avx512bw_pmaddwd512v32hi ((TARGET_AVX512BW && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_pmaddwd512v32hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512BW)))
#define HAVE_avx512bw_pmaddwd512v16hi ((TARGET_AVX512BW && 1) && (TARGET_AVX2))
#define HAVE_avx512bw_pmaddwd512v16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX2)))
#define HAVE_avx512bw_pmaddwd512v8hi (TARGET_AVX512BW && 1)
#define HAVE_avx512bw_pmaddwd512v8hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_avx512dq_mulv8di3 ((TARGET_AVX512DQ && 1) && (TARGET_AVX512F))
#define HAVE_avx512dq_mulv8di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx512dq_mulv4di3 ((TARGET_AVX512DQ && 1) && (TARGET_AVX))
#define HAVE_avx512dq_mulv4di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX)))
#define HAVE_avx512dq_mulv2di3 (TARGET_AVX512DQ && 1)
#define HAVE_avx512dq_mulv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && (16 == 64 || TARGET_AVX512VL)))
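/* Vector shifts: ashr = arithmetic right (psra), lshr = logical right
   (psrl), ashl = left (psll); the v1ti/v2ti/v4ti entries are the
   byte-granular whole-register shifts (pslldq/psrldq).  */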
#define HAVE_ashrv16hi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_ashrv8hi3 (TARGET_SSE2)
#define HAVE_ashrv8si3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_ashrv4si3 (TARGET_SSE2)
#define HAVE_ashrv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512VL) && (TARGET_AVX512BW)))
#define HAVE_ashrv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512VL) && (TARGET_AVX512BW)))
#define HAVE_ashrv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_ashrv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_ashrv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_ashrv32hi3 ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_ashrv32hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512BW)))
#define HAVE_ashrv4di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_ashrv4di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_ashrv16si3 (TARGET_AVX512F)
#define HAVE_ashrv16si3_mask (TARGET_AVX512F)
#define HAVE_ashrv8di3 (TARGET_AVX512F)
#define HAVE_ashrv8di3_mask (TARGET_AVX512F)
#define HAVE_ashlv32hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_ashlv32hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_lshrv32hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_lshrv32hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_ashlv16hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_ashlv16hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_lshrv16hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_lshrv16hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_ashlv8hi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_ashlv8hi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_lshrv8hi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_lshrv8hi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_ashlv8si3 ((TARGET_SSE2 && 1) && (TARGET_AVX2))
#define HAVE_ashlv8si3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX2)))
#define HAVE_lshrv8si3 ((TARGET_SSE2 && 1) && (TARGET_AVX2))
#define HAVE_lshrv8si3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX2)))
#define HAVE_ashlv4si3 (TARGET_SSE2 && 1)
#define HAVE_ashlv4si3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_lshrv4si3 (TARGET_SSE2 && 1)
#define HAVE_lshrv4si3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_ashlv4di3 ((TARGET_SSE2 && 1) && (TARGET_AVX2))
#define HAVE_ashlv4di3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX2)))
#define HAVE_lshrv4di3 ((TARGET_SSE2 && 1) && (TARGET_AVX2))
#define HAVE_lshrv4di3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX2)))
#define HAVE_ashlv2di3 (TARGET_SSE2 && 1)
#define HAVE_ashlv2di3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_lshrv2di3 (TARGET_SSE2 && 1)
#define HAVE_lshrv2di3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_ashlv16si3 (TARGET_AVX512F && 1)
#define HAVE_ashlv16si3_mask ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL)))
#define HAVE_lshrv16si3 (TARGET_AVX512F && 1)
#define HAVE_lshrv16si3_mask ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL)))
#define HAVE_ashlv8di3 (TARGET_AVX512F && 1)
#define HAVE_ashlv8di3_mask ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL)))
#define HAVE_lshrv8di3 (TARGET_AVX512F && 1)
#define HAVE_lshrv8di3_mask ((TARGET_AVX512F) && (TARGET_AVX512F && (64 == 64 || TARGET_AVX512VL)))
#define HAVE_avx512bw_ashlv4ti3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_avx2_ashlv2ti3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_sse2_ashlv1ti3 (TARGET_SSE2)
#define HAVE_avx512bw_lshrv4ti3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_avx2_lshrv2ti3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_sse2_lshrv1ti3 (TARGET_SSE2)
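/* Rotates: rolv/rorv are the variable-count vprolv{d,q}/vprorv{d,q}
   forms, rol/ror the immediate-count vprol{d,q}/vpror{d,q} forms.  */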
#define HAVE_avx512f_rolvv16si (TARGET_AVX512F)
#define HAVE_avx512f_rolvv16si_mask (TARGET_AVX512F)
#define HAVE_avx512f_rorvv16si (TARGET_AVX512F)
#define HAVE_avx512f_rorvv16si_mask (TARGET_AVX512F)
#define HAVE_avx512vl_rolvv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rolvv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rorvv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rorvv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rolvv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rolvv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rorvv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rorvv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_rolvv8di (TARGET_AVX512F)
#define HAVE_avx512f_rolvv8di_mask (TARGET_AVX512F)
#define HAVE_avx512f_rorvv8di (TARGET_AVX512F)
#define HAVE_avx512f_rorvv8di_mask (TARGET_AVX512F)
#define HAVE_avx512vl_rolvv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rolvv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rorvv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rorvv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rolvv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rolvv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rorvv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rorvv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_rolv16si (TARGET_AVX512F)
#define HAVE_avx512f_rolv16si_mask (TARGET_AVX512F)
#define HAVE_avx512f_rorv16si (TARGET_AVX512F)
#define HAVE_avx512f_rorv16si_mask (TARGET_AVX512F)
#define HAVE_avx512vl_rolv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rolv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rorv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rorv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rolv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rolv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rorv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rorv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_rolv8di (TARGET_AVX512F)
#define HAVE_avx512f_rolv8di_mask (TARGET_AVX512F)
#define HAVE_avx512f_rorv8di (TARGET_AVX512F)
#define HAVE_avx512f_rorv8di_mask (TARGET_AVX512F)
#define HAVE_avx512vl_rolv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rolv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rorv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rorv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rolv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rolv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_rorv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_rorv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
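/* Masked byte/word signed and unsigned min/max (vpmins, vpmaxs, vpminu,
   vpmaxu under AVX512BW/AVX512VL).  */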
#define HAVE_smaxv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_sminv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_umaxv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_uminv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_smaxv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_sminv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_umaxv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_uminv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_smaxv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_sminv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_umaxv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_uminv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_smaxv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_sminv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_umaxv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_uminv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_smaxv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_sminv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_umaxv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_uminv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_smaxv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_sminv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_umaxv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_uminv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
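/* Integer equality and greater-than compares.  The AVX512 _1 forms are
   the patterns that write a mask register (vpcmpeq / vpcmpgt with a %k
   destination); ix86_binary_operator_ok validates the operand
   combination.  */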
#define HAVE_avx512bw_eqv64qi3_1 (TARGET_AVX512F && ix86_binary_operator_ok (EQ, V64QImode, operands))
#define HAVE_avx512bw_eqv64qi3_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512F && ix86_binary_operator_ok (EQ, V64QImode, operands)))
#define HAVE_avx512vl_eqv16qi3_1 ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V16QImode, operands)) && (TARGET_AVX512VL))
#define HAVE_avx512vl_eqv16qi3_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V16QImode, operands)) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_eqv32qi3_1 ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V32QImode, operands)) && (TARGET_AVX512VL))
#define HAVE_avx512vl_eqv32qi3_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V32QImode, operands)) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_eqv32hi3_1 (TARGET_AVX512F && ix86_binary_operator_ok (EQ, V32HImode, operands))
#define HAVE_avx512bw_eqv32hi3_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512F && ix86_binary_operator_ok (EQ, V32HImode, operands)))
#define HAVE_avx512vl_eqv16hi3_1 ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V16HImode, operands)) && (TARGET_AVX512VL))
#define HAVE_avx512vl_eqv16hi3_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V16HImode, operands)) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_eqv8hi3_1 ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V8HImode, operands)) && (TARGET_AVX512VL))
#define HAVE_avx512vl_eqv8hi3_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V8HImode, operands)) && (TARGET_AVX512VL)))
#define HAVE_avx512f_eqv16si3_1 (TARGET_AVX512F && ix86_binary_operator_ok (EQ, V16SImode, operands))
#define HAVE_avx512f_eqv16si3_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512F && ix86_binary_operator_ok (EQ, V16SImode, operands)))
#define HAVE_avx512vl_eqv8si3_1 ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V8SImode, operands)) && (TARGET_AVX512VL))
#define HAVE_avx512vl_eqv8si3_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V8SImode, operands)) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_eqv4si3_1 ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V4SImode, operands)) && (TARGET_AVX512VL))
#define HAVE_avx512vl_eqv4si3_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V4SImode, operands)) && (TARGET_AVX512VL)))
#define HAVE_avx512f_eqv8di3_1 (TARGET_AVX512F && ix86_binary_operator_ok (EQ, V8DImode, operands))
#define HAVE_avx512f_eqv8di3_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512F && ix86_binary_operator_ok (EQ, V8DImode, operands)))
#define HAVE_avx512vl_eqv4di3_1 ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V4DImode, operands)) && (TARGET_AVX512VL))
#define HAVE_avx512vl_eqv4di3_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V4DImode, operands)) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_eqv2di3_1 ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V2DImode, operands)) && (TARGET_AVX512VL))
#define HAVE_avx512vl_eqv2di3_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512F && ix86_binary_operator_ok (EQ, V2DImode, operands)) && (TARGET_AVX512VL)))
#define HAVE_sse4_2_gtv2di3 (TARGET_SSE4_2)
#define HAVE_avx2_gtv32qi3 (TARGET_AVX2)
#define HAVE_avx2_gtv16hi3 (TARGET_AVX2)
#define HAVE_avx2_gtv8si3 (TARGET_AVX2)
#define HAVE_avx2_gtv4di3 (TARGET_AVX2)
#define HAVE_avx512f_gtv16si3 (TARGET_AVX512F)
#define HAVE_avx512f_gtv16si3_mask (TARGET_AVX512F)
#define HAVE_avx512vl_gtv8si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gtv8si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_gtv4si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gtv4si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_gtv8di3 (TARGET_AVX512F)
#define HAVE_avx512f_gtv8di3_mask (TARGET_AVX512F)
#define HAVE_avx512vl_gtv4di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gtv4di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_gtv2di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gtv2di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_gtv64qi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_gtv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_gtv16qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gtv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_gtv32qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gtv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_gtv32hi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_gtv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_gtv16hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gtv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_gtv8hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gtv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_sse2_gtv16qi3 (TARGET_SSE2 && !TARGET_XOP)
#define HAVE_sse2_gtv8hi3 (TARGET_SSE2 && !TARGET_XOP)
#define HAVE_sse2_gtv4si3 (TARGET_SSE2 && !TARGET_XOP)
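/* Masked full-width logicals: vpand{d,q}, vpor{d,q}, vpxor{d,q}.  */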
#define HAVE_andv16si3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (AND, V16SImode, operands)) && (TARGET_AVX512F)))
#define HAVE_iorv16si3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (IOR, V16SImode, operands)) && (TARGET_AVX512F)))
#define HAVE_xorv16si3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (XOR, V16SImode, operands)) && (TARGET_AVX512F)))
#define HAVE_andv8si3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (AND, V8SImode, operands)) && (TARGET_AVX)))
#define HAVE_iorv8si3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (IOR, V8SImode, operands)) && (TARGET_AVX)))
#define HAVE_xorv8si3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (XOR, V8SImode, operands)) && (TARGET_AVX)))
#define HAVE_andv4si3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (AND, V4SImode, operands)))
#define HAVE_iorv4si3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (IOR, V4SImode, operands)))
#define HAVE_xorv4si3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (XOR, V4SImode, operands)))
#define HAVE_andv8di3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (AND, V8DImode, operands)) && (TARGET_AVX512F)))
#define HAVE_iorv8di3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (IOR, V8DImode, operands)) && (TARGET_AVX512F)))
#define HAVE_xorv8di3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (XOR, V8DImode, operands)) && (TARGET_AVX512F)))
#define HAVE_andv4di3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (AND, V4DImode, operands)) && (TARGET_AVX)))
#define HAVE_iorv4di3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (IOR, V4DImode, operands)) && (TARGET_AVX)))
#define HAVE_xorv4di3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (XOR, V4DImode, operands)) && (TARGET_AVX)))
#define HAVE_andv2di3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (AND, V2DImode, operands)))
#define HAVE_iorv2di3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (IOR, V2DImode, operands)))
#define HAVE_xorv2di3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) \
&& ix86_binary_operator_ok (XOR, V2DImode, operands)))
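/* vptestm / vptestnm patterns: set a mask bit where the bitwise AND of
   the source elements is nonzero (testm) or zero (testnm).  */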
#define HAVE_avx512bw_testmv64qi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_testmv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_testmv16qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_testmv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_testmv32qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_testmv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_testmv32hi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_testmv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_testmv16hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_testmv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_testmv8hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_testmv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512f_testmv16si3 (TARGET_AVX512F)
#define HAVE_avx512f_testmv16si3_mask (TARGET_AVX512F)
#define HAVE_avx512vl_testmv8si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_testmv8si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_testmv4si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_testmv4si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_testmv8di3 (TARGET_AVX512F)
#define HAVE_avx512f_testmv8di3_mask (TARGET_AVX512F)
#define HAVE_avx512vl_testmv4di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_testmv4di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_testmv2di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_testmv2di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_testnmv64qi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_testnmv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_testnmv16qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_testnmv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_testnmv32qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_testnmv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_testnmv32hi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_testnmv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_testnmv16hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_testnmv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_testnmv8hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_testnmv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512f_testnmv16si3 (TARGET_AVX512F)
#define HAVE_avx512f_testnmv16si3_mask (TARGET_AVX512F)
#define HAVE_avx512vl_testnmv8si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_testnmv8si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_testnmv4si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_testnmv4si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_testnmv8di3 (TARGET_AVX512F)
#define HAVE_avx512f_testnmv8di3_mask (TARGET_AVX512F)
#define HAVE_avx512vl_testnmv4di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_testnmv4di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_testnmv2di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_testnmv2di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
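/* Saturating pack-down patterns (packsswb, packssdw, packuswb) with
   their AVX512BW masked variants.  */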
#define HAVE_avx512bw_packsswb ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_packsswb_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx2_packsswb ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_packsswb_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_sse2_packsswb (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_packsswb_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_avx512bw_packssdw ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_packssdw_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx2_packssdw ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_packssdw_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_sse2_packssdw (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_packssdw_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_avx512bw_packuswb ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_packuswb_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx2_packuswb ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_packuswb_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_sse2_packuswb (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_packuswb_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
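/* punpckh / punpckl byte, word and dword interleaves.  */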
#define HAVE_avx512bw_interleave_highv64qi (TARGET_AVX512BW)
#define HAVE_avx512bw_interleave_highv64qi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx2_interleave_highv32qi (TARGET_AVX2 && 1)
#define HAVE_avx2_interleave_highv32qi_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_vec_interleave_highv16qi (TARGET_SSE2 && 1)
#define HAVE_vec_interleave_highv16qi_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
#define HAVE_avx512bw_interleave_lowv64qi (TARGET_AVX512BW)
#define HAVE_avx512bw_interleave_lowv64qi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx2_interleave_lowv32qi (TARGET_AVX2 && 1 && 1)
#define HAVE_avx2_interleave_lowv32qi_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL && TARGET_AVX512BW))
#define HAVE_vec_interleave_lowv16qi (TARGET_SSE2 && 1 && 1)
#define HAVE_vec_interleave_lowv16qi_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL && TARGET_AVX512BW))
#define HAVE_avx512bw_interleave_highv32hi (TARGET_AVX512BW)
#define HAVE_avx512bw_interleave_highv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx2_interleave_highv16hi (TARGET_AVX2 && 1 && 1)
#define HAVE_avx2_interleave_highv16hi_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL && TARGET_AVX512BW))
#define HAVE_vec_interleave_highv8hi (TARGET_SSE2 && 1 && 1)
#define HAVE_vec_interleave_highv8hi_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL && TARGET_AVX512BW))
#define HAVE_avx512bw_interleave_lowv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx2_interleave_lowv16hi (TARGET_AVX2 && 1 && 1)
#define HAVE_avx2_interleave_lowv16hi_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL && TARGET_AVX512BW))
#define HAVE_vec_interleave_lowv8hi (TARGET_SSE2 && 1 && 1)
#define HAVE_vec_interleave_lowv8hi_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL && TARGET_AVX512BW))
#define HAVE_avx2_interleave_highv8si (TARGET_AVX2 && 1)
#define HAVE_avx2_interleave_highv8si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_avx512f_interleave_highv16si_mask (TARGET_AVX512F)
#define HAVE_vec_interleave_highv4si (TARGET_SSE2 && 1)
#define HAVE_vec_interleave_highv4si_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
#define HAVE_avx2_interleave_lowv8si (TARGET_AVX2 && 1)
#define HAVE_avx2_interleave_lowv8si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_avx512f_interleave_lowv16si_mask (TARGET_AVX512F)
#define HAVE_vec_interleave_lowv4si (TARGET_SSE2 && 1)
#define HAVE_vec_interleave_lowv4si_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
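/* Element and subvector insertion: pinsr{b,w,d,q}, the AVX512
   vinsert{f,i}{32x4,64x2} forms, and the vec_set_lo/hi halves of
   512-bit vectors.  */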
#define HAVE_sse4_1_pinsrb ((TARGET_SSE2 \
&& ((unsigned) exact_log2 (INTVAL (operands[3])) \
< GET_MODE_NUNITS (V16QImode))) && (TARGET_SSE4_1))
#define HAVE_sse2_pinsrw (TARGET_SSE2 \
&& ((unsigned) exact_log2 (INTVAL (operands[3])) \
< GET_MODE_NUNITS (V8HImode)))
#define HAVE_sse4_1_pinsrd ((TARGET_SSE2 \
&& ((unsigned) exact_log2 (INTVAL (operands[3])) \
< GET_MODE_NUNITS (V4SImode))) && (TARGET_SSE4_1))
#define HAVE_sse4_1_pinsrq ((TARGET_SSE2 \
&& ((unsigned) exact_log2 (INTVAL (operands[3])) \
< GET_MODE_NUNITS (V2DImode))) && (TARGET_SSE4_1 && TARGET_64BIT))
#define HAVE_avx512dq_vinsertf64x2_1_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ)))
#define HAVE_avx512dq_vinserti64x2_1_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ)))
#define HAVE_avx512f_vinsertf32x4_1_mask (TARGET_AVX512F)
#define HAVE_avx512f_vinserti32x4_1_mask (TARGET_AVX512F)
#define HAVE_vec_set_lo_v16sf (TARGET_AVX512DQ)
#define HAVE_vec_set_lo_v16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_vec_set_lo_v16si (TARGET_AVX512DQ)
#define HAVE_vec_set_lo_v16si_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_vec_set_hi_v16sf (TARGET_AVX512DQ)
#define HAVE_vec_set_hi_v16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_vec_set_hi_v16si (TARGET_AVX512DQ)
#define HAVE_vec_set_hi_v16si_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_vec_set_lo_v8df (TARGET_AVX512F)
#define HAVE_vec_set_lo_v8df_mask (TARGET_AVX512F)
#define HAVE_vec_set_lo_v8di (TARGET_AVX512F)
#define HAVE_vec_set_lo_v8di_mask (TARGET_AVX512F)
#define HAVE_vec_set_hi_v8df (TARGET_AVX512F)
#define HAVE_vec_set_hi_v8df_mask (TARGET_AVX512F)
#define HAVE_vec_set_hi_v8di (TARGET_AVX512F)
#define HAVE_vec_set_hi_v8di_mask (TARGET_AVX512F)
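/* vshuf{f,i}{64x2,32x4} cross-lane block shuffles; as with vshufps
   above, the INTVAL chains verify the selector layout the single
   immediate encodes.  */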
#define HAVE_avx512dq_shuf_i64x2_1_mask ((TARGET_AVX512F) && (TARGET_AVX512VL \
&& (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
&& INTVAL (operands[5]) == (INTVAL (operands[6]) - 1))))
#define HAVE_avx512dq_shuf_f64x2_1_mask ((TARGET_AVX512F) && (TARGET_AVX512VL \
&& (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
&& INTVAL (operands[5]) == (INTVAL (operands[6]) - 1))))
#define HAVE_avx512f_shuf_f64x2_1 (TARGET_AVX512F \
&& (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
&& INTVAL (operands[5]) == (INTVAL (operands[6]) - 1) \
&& INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
&& INTVAL (operands[9]) == (INTVAL (operands[10]) - 1)))
#define HAVE_avx512f_shuf_f64x2_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
&& (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
&& INTVAL (operands[5]) == (INTVAL (operands[6]) - 1) \
&& INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
&& INTVAL (operands[9]) == (INTVAL (operands[10]) - 1))))
#define HAVE_avx512f_shuf_i64x2_1 (TARGET_AVX512F \
&& (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
&& INTVAL (operands[5]) == (INTVAL (operands[6]) - 1) \
&& INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
&& INTVAL (operands[9]) == (INTVAL (operands[10]) - 1)))
#define HAVE_avx512f_shuf_i64x2_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
&& (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
&& INTVAL (operands[5]) == (INTVAL (operands[6]) - 1) \
&& INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
&& INTVAL (operands[9]) == (INTVAL (operands[10]) - 1))))
#define HAVE_avx512vl_shuf_i32x4_1_mask ((TARGET_AVX512F) && (TARGET_AVX512VL \
&& (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
&& INTVAL (operands[3]) == (INTVAL (operands[5]) - 2) \
&& INTVAL (operands[3]) == (INTVAL (operands[6]) - 3) \
&& INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
&& INTVAL (operands[7]) == (INTVAL (operands[9]) - 2) \
&& INTVAL (operands[7]) == (INTVAL (operands[10]) - 3))))
#define HAVE_avx512vl_shuf_f32x4_1_mask ((TARGET_AVX512F) && (TARGET_AVX512VL \
&& (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
&& INTVAL (operands[3]) == (INTVAL (operands[5]) - 2) \
&& INTVAL (operands[3]) == (INTVAL (operands[6]) - 3) \
&& INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
&& INTVAL (operands[7]) == (INTVAL (operands[9]) - 2) \
&& INTVAL (operands[7]) == (INTVAL (operands[10]) - 3))))
#define HAVE_avx512f_shuf_f32x4_1 (TARGET_AVX512F \
&& (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
&& INTVAL (operands[3]) == (INTVAL (operands[5]) - 2) \
&& INTVAL (operands[3]) == (INTVAL (operands[6]) - 3) \
&& INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
&& INTVAL (operands[7]) == (INTVAL (operands[9]) - 2) \
&& INTVAL (operands[7]) == (INTVAL (operands[10]) - 3) \
&& INTVAL (operands[11]) == (INTVAL (operands[12]) - 1) \
&& INTVAL (operands[11]) == (INTVAL (operands[13]) - 2) \
&& INTVAL (operands[11]) == (INTVAL (operands[14]) - 3) \
&& INTVAL (operands[15]) == (INTVAL (operands[16]) - 1) \
&& INTVAL (operands[15]) == (INTVAL (operands[17]) - 2) \
&& INTVAL (operands[15]) == (INTVAL (operands[18]) - 3)))
#define HAVE_avx512f_shuf_f32x4_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
&& (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
&& INTVAL (operands[3]) == (INTVAL (operands[5]) - 2) \
&& INTVAL (operands[3]) == (INTVAL (operands[6]) - 3) \
&& INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
&& INTVAL (operands[7]) == (INTVAL (operands[9]) - 2) \
&& INTVAL (operands[7]) == (INTVAL (operands[10]) - 3) \
&& INTVAL (operands[11]) == (INTVAL (operands[12]) - 1) \
&& INTVAL (operands[11]) == (INTVAL (operands[13]) - 2) \
&& INTVAL (operands[11]) == (INTVAL (operands[14]) - 3) \
&& INTVAL (operands[15]) == (INTVAL (operands[16]) - 1) \
&& INTVAL (operands[15]) == (INTVAL (operands[17]) - 2) \
&& INTVAL (operands[15]) == (INTVAL (operands[18]) - 3))))
#define HAVE_avx512f_shuf_i32x4_1 (TARGET_AVX512F \
&& (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
&& INTVAL (operands[3]) == (INTVAL (operands[5]) - 2) \
&& INTVAL (operands[3]) == (INTVAL (operands[6]) - 3) \
&& INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
&& INTVAL (operands[7]) == (INTVAL (operands[9]) - 2) \
&& INTVAL (operands[7]) == (INTVAL (operands[10]) - 3) \
&& INTVAL (operands[11]) == (INTVAL (operands[12]) - 1) \
&& INTVAL (operands[11]) == (INTVAL (operands[13]) - 2) \
&& INTVAL (operands[11]) == (INTVAL (operands[14]) - 3) \
&& INTVAL (operands[15]) == (INTVAL (operands[16]) - 1) \
&& INTVAL (operands[15]) == (INTVAL (operands[17]) - 2) \
&& INTVAL (operands[15]) == (INTVAL (operands[18]) - 3)))
#define HAVE_avx512f_shuf_i32x4_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
&& (INTVAL (operands[3]) == (INTVAL (operands[4]) - 1) \
&& INTVAL (operands[3]) == (INTVAL (operands[5]) - 2) \
&& INTVAL (operands[3]) == (INTVAL (operands[6]) - 3) \
&& INTVAL (operands[7]) == (INTVAL (operands[8]) - 1) \
&& INTVAL (operands[7]) == (INTVAL (operands[9]) - 2) \
&& INTVAL (operands[7]) == (INTVAL (operands[10]) - 3) \
&& INTVAL (operands[11]) == (INTVAL (operands[12]) - 1) \
&& INTVAL (operands[11]) == (INTVAL (operands[13]) - 2) \
&& INTVAL (operands[11]) == (INTVAL (operands[14]) - 3) \
&& INTVAL (operands[15]) == (INTVAL (operands[16]) - 1) \
&& INTVAL (operands[15]) == (INTVAL (operands[17]) - 2) \
&& INTVAL (operands[15]) == (INTVAL (operands[18]) - 3))))
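/* pshufd / pshuflw / pshufhw immediate shuffles; the wider forms
   require the per-lane selectors to repeat (the +4 / +8 offset
   checks).  */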
#define HAVE_avx512f_pshufd_1 (TARGET_AVX512F \
&& INTVAL (operands[2]) + 4 == INTVAL (operands[6]) \
&& INTVAL (operands[3]) + 4 == INTVAL (operands[7]) \
&& INTVAL (operands[4]) + 4 == INTVAL (operands[8]) \
&& INTVAL (operands[5]) + 4 == INTVAL (operands[9]) \
&& INTVAL (operands[2]) + 8 == INTVAL (operands[10]) \
&& INTVAL (operands[3]) + 8 == INTVAL (operands[11]) \
&& INTVAL (operands[4]) + 8 == INTVAL (operands[12]) \
&& INTVAL (operands[5]) + 8 == INTVAL (operands[13]) \
&& INTVAL (operands[2]) + 12 == INTVAL (operands[14]) \
&& INTVAL (operands[3]) + 12 == INTVAL (operands[15]) \
&& INTVAL (operands[4]) + 12 == INTVAL (operands[16]) \
&& INTVAL (operands[5]) + 12 == INTVAL (operands[17]))
#define HAVE_avx512f_pshufd_1_mask ((TARGET_AVX512F) && (TARGET_AVX512F \
&& INTVAL (operands[2]) + 4 == INTVAL (operands[6]) \
&& INTVAL (operands[3]) + 4 == INTVAL (operands[7]) \
&& INTVAL (operands[4]) + 4 == INTVAL (operands[8]) \
&& INTVAL (operands[5]) + 4 == INTVAL (operands[9]) \
&& INTVAL (operands[2]) + 8 == INTVAL (operands[10]) \
&& INTVAL (operands[3]) + 8 == INTVAL (operands[11]) \
&& INTVAL (operands[4]) + 8 == INTVAL (operands[12]) \
&& INTVAL (operands[5]) + 8 == INTVAL (operands[13]) \
&& INTVAL (operands[2]) + 12 == INTVAL (operands[14]) \
&& INTVAL (operands[3]) + 12 == INTVAL (operands[15]) \
&& INTVAL (operands[4]) + 12 == INTVAL (operands[16]) \
&& INTVAL (operands[5]) + 12 == INTVAL (operands[17])))
#define HAVE_avx2_pshufd_1 (TARGET_AVX2 \
&& 1 \
&& INTVAL (operands[2]) + 4 == INTVAL (operands[6]) \
&& INTVAL (operands[3]) + 4 == INTVAL (operands[7]) \
&& INTVAL (operands[4]) + 4 == INTVAL (operands[8]) \
&& INTVAL (operands[5]) + 4 == INTVAL (operands[9]))
#define HAVE_avx2_pshufd_1_mask ((TARGET_AVX512F) && (TARGET_AVX2 \
&& TARGET_AVX512VL \
&& INTVAL (operands[2]) + 4 == INTVAL (operands[6]) \
&& INTVAL (operands[3]) + 4 == INTVAL (operands[7]) \
&& INTVAL (operands[4]) + 4 == INTVAL (operands[8]) \
&& INTVAL (operands[5]) + 4 == INTVAL (operands[9])))
#define HAVE_sse2_pshufd_1 (TARGET_SSE2 && 1)
#define HAVE_sse2_pshufd_1_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
#define HAVE_avx512bw_pshuflwv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx2_pshuflw_1 (TARGET_AVX2 \
&& 1 && 1 \
&& INTVAL (operands[2]) + 8 == INTVAL (operands[6]) \
&& INTVAL (operands[3]) + 8 == INTVAL (operands[7]) \
&& INTVAL (operands[4]) + 8 == INTVAL (operands[8]) \
&& INTVAL (operands[5]) + 8 == INTVAL (operands[9]))
#define HAVE_avx2_pshuflw_1_mask ((TARGET_AVX512F) && (TARGET_AVX2 \
&& TARGET_AVX512BW && TARGET_AVX512VL \
&& INTVAL (operands[2]) + 8 == INTVAL (operands[6]) \
&& INTVAL (operands[3]) + 8 == INTVAL (operands[7]) \
&& INTVAL (operands[4]) + 8 == INTVAL (operands[8]) \
&& INTVAL (operands[5]) + 8 == INTVAL (operands[9])))
#define HAVE_sse2_pshuflw_1 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_pshuflw_1_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512BW && TARGET_AVX512VL))
#define HAVE_avx512bw_pshufhwv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx2_pshufhw_1 (TARGET_AVX2 \
&& 1 && 1 \
&& INTVAL (operands[2]) + 8 == INTVAL (operands[6]) \
&& INTVAL (operands[3]) + 8 == INTVAL (operands[7]) \
&& INTVAL (operands[4]) + 8 == INTVAL (operands[8]) \
&& INTVAL (operands[5]) + 8 == INTVAL (operands[9]))
#define HAVE_avx2_pshufhw_1_mask ((TARGET_AVX512F) && (TARGET_AVX2 \
&& TARGET_AVX512BW && TARGET_AVX512VL \
&& INTVAL (operands[2]) + 8 == INTVAL (operands[6]) \
&& INTVAL (operands[3]) + 8 == INTVAL (operands[7]) \
&& INTVAL (operands[4]) + 8 == INTVAL (operands[8]) \
&& INTVAL (operands[5]) + 8 == INTVAL (operands[9])))
#define HAVE_sse2_pshufhw_1 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_pshufhw_1_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512BW && TARGET_AVX512VL))
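/* Miscellaneous SSE/SSE2 expanders: scalar loads, psadbw sums of
   absolute differences, movmsk sign-bit extraction, MXCSR load/store,
   clflush, and the SSE3 monitor/mwait pair.  */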
#define HAVE_sse2_loadld (TARGET_SSE)
#define HAVE_vec_concatv2di (TARGET_SSE)
#define HAVE_avx512f_psadbw ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_avx2_psadbw ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_sse2_psadbw (TARGET_SSE2)
#define HAVE_avx_movmskps256 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sse_movmskps (TARGET_SSE)
#define HAVE_avx_movmskpd256 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sse2_movmskpd ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_avx2_pmovmskb ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_sse2_pmovmskb (TARGET_SSE2)
#define HAVE_sse_ldmxcsr (TARGET_SSE)
#define HAVE_sse_stmxcsr (TARGET_SSE)
#define HAVE_sse2_clflush (TARGET_SSE2)
#define HAVE_sse3_mwait (TARGET_SSE3)
#define HAVE_sse3_monitor_si ((TARGET_SSE3) && (Pmode == SImode))
#define HAVE_sse3_monitor_di ((TARGET_SSE3) && (Pmode == DImode))
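/* Each HAVE_<name> macro in this file is the availability test for the
   identically named insn pattern: back-end code (or a plugin) checks it
   before calling the matching gen_<name> emitter.  A minimal sketch,
   assuming the usual single-address clflush pattern and an rtx ADDR
   already in hand (ADDR is illustrative, not from this file):

     if (HAVE_sse2_clflush)
       emit_insn (gen_sse2_clflush (addr));
*/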
#define HAVE_avx2_phaddwv16hi3 (TARGET_AVX2)
#define HAVE_avx2_phaddswv16hi3 (TARGET_AVX2)
#define HAVE_avx2_phsubwv16hi3 (TARGET_AVX2)
#define HAVE_avx2_phsubswv16hi3 (TARGET_AVX2)
#define HAVE_ssse3_phaddwv8hi3 (TARGET_SSSE3)
#define HAVE_ssse3_phaddswv8hi3 (TARGET_SSSE3)
#define HAVE_ssse3_phsubwv8hi3 (TARGET_SSSE3)
#define HAVE_ssse3_phsubswv8hi3 (TARGET_SSSE3)
#define HAVE_ssse3_phaddwv4hi3 (TARGET_SSSE3)
#define HAVE_ssse3_phaddswv4hi3 (TARGET_SSSE3)
#define HAVE_ssse3_phsubwv4hi3 (TARGET_SSSE3)
#define HAVE_ssse3_phsubswv4hi3 (TARGET_SSSE3)
#define HAVE_avx2_phadddv8si3 (TARGET_AVX2)
#define HAVE_avx2_phsubdv8si3 (TARGET_AVX2)
#define HAVE_ssse3_phadddv4si3 (TARGET_SSSE3)
#define HAVE_ssse3_phsubdv4si3 (TARGET_SSSE3)
#define HAVE_ssse3_phadddv2si3 (TARGET_SSSE3)
#define HAVE_ssse3_phsubdv2si3 (TARGET_SSSE3)
#define HAVE_avx2_pmaddubsw256 (TARGET_AVX2)
#define HAVE_avx512bw_pmaddubsw512v8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_pmaddubsw512v8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_pmaddubsw512v16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_pmaddubsw512v16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_pmaddubsw512v32hi (TARGET_AVX512BW)
#define HAVE_avx512bw_pmaddubsw512v32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512bw_umulhrswv32hi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_umulhrswv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_ssse3_pmaddubsw128 (TARGET_SSSE3)
#define HAVE_ssse3_pmaddubsw (TARGET_SSSE3)
#define HAVE_avx512bw_pshufbv64qi3 ((TARGET_SSSE3 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_pshufbv64qi3_mask ((TARGET_AVX512F) && ((TARGET_SSSE3 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx2_pshufbv32qi3 ((TARGET_SSSE3 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_pshufbv32qi3_mask ((TARGET_AVX512F) && ((TARGET_SSSE3 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_ssse3_pshufbv16qi3 (TARGET_SSSE3 && 1 && 1)
#define HAVE_ssse3_pshufbv16qi3_mask ((TARGET_AVX512F) && (TARGET_SSSE3 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_ssse3_pshufbv8qi3 (TARGET_SSSE3)
#define HAVE_avx2_psignv32qi3 ((TARGET_SSSE3) && (TARGET_AVX2))
#define HAVE_ssse3_psignv16qi3 (TARGET_SSSE3)
#define HAVE_avx2_psignv16hi3 ((TARGET_SSSE3) && (TARGET_AVX2))
#define HAVE_ssse3_psignv8hi3 (TARGET_SSSE3)
#define HAVE_avx2_psignv8si3 ((TARGET_SSSE3) && (TARGET_AVX2))
#define HAVE_ssse3_psignv4si3 (TARGET_SSSE3)
#define HAVE_ssse3_psignv8qi3 (TARGET_SSSE3)
#define HAVE_ssse3_psignv4hi3 (TARGET_SSSE3)
#define HAVE_ssse3_psignv2si3 (TARGET_SSSE3)
#define HAVE_avx512bw_palignrv64qi_mask ((TARGET_AVX512BW && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512BW))
#define HAVE_avx2_palignrv32qi_mask ((TARGET_AVX512BW && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX2))
#define HAVE_ssse3_palignrv16qi_mask (TARGET_AVX512BW && (16 == 64 || TARGET_AVX512VL))
#define HAVE_avx512bw_palignrv4ti ((TARGET_SSSE3) && (TARGET_AVX512BW))
#define HAVE_avx2_palignrv2ti ((TARGET_SSSE3) && (TARGET_AVX2))
#define HAVE_ssse3_palignrti (TARGET_SSSE3)
#define HAVE_ssse3_palignrdi (TARGET_SSSE3)
#define HAVE_absv16si2_mask (TARGET_AVX512F)
#define HAVE_absv8si2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_absv4si2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_absv8di2_mask (TARGET_AVX512F)
#define HAVE_absv4di2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_absv2di2_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_absv64qi2_mask (TARGET_AVX512BW)
#define HAVE_absv16qi2_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_absv32qi2_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_absv32hi2_mask (TARGET_AVX512BW)
#define HAVE_absv16hi2_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_absv8hi2_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_absv8qi2 (TARGET_SSSE3)
#define HAVE_absv4hi2 (TARGET_SSSE3)
#define HAVE_absv2si2 (TARGET_SSSE3)
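/* Every _mask variant prepends (TARGET_AVX512F): the masked form of a
   pattern is produced by a define_subst whose own condition requires the
   AVX-512 mask registers, independently of the ISA set that provides the
   unmasked instruction, so e.g. absv16qi2_mask needs AVX512BW+AVX512VL
   for the operation and AVX512F for the masking.  */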
#define HAVE_sse4a_movntsf (TARGET_SSE4A)
#define HAVE_sse4a_movntdf (TARGET_SSE4A)
#define HAVE_sse4a_vmmovntv4sf (TARGET_SSE4A)
#define HAVE_sse4a_vmmovntv2df ((TARGET_SSE4A) && (TARGET_SSE2))
#define HAVE_sse4a_extrqi (TARGET_SSE4A)
#define HAVE_sse4a_extrq (TARGET_SSE4A)
#define HAVE_sse4a_insertqi (TARGET_SSE4A)
#define HAVE_sse4a_insertq (TARGET_SSE4A)
#define HAVE_avx_blendps256 ((TARGET_SSE4_1) && (TARGET_AVX))
#define HAVE_sse4_1_blendps (TARGET_SSE4_1)
#define HAVE_avx_blendpd256 ((TARGET_SSE4_1) && (TARGET_AVX))
#define HAVE_sse4_1_blendpd ((TARGET_SSE4_1) && (TARGET_SSE2))
#define HAVE_avx_blendvps256 ((TARGET_SSE4_1) && (TARGET_AVX))
#define HAVE_sse4_1_blendvps (TARGET_SSE4_1)
#define HAVE_avx_blendvpd256 ((TARGET_SSE4_1) && (TARGET_AVX))
#define HAVE_sse4_1_blendvpd ((TARGET_SSE4_1) && (TARGET_SSE2))
#define HAVE_avx_dpps256 ((TARGET_SSE4_1) && (TARGET_AVX))
#define HAVE_sse4_1_dpps (TARGET_SSE4_1)
#define HAVE_avx_dppd256 ((TARGET_SSE4_1) && (TARGET_AVX))
#define HAVE_sse4_1_dppd ((TARGET_SSE4_1) && (TARGET_SSE2))
#define HAVE_avx512f_movntdqa ((TARGET_SSE4_1) && (TARGET_AVX512F))
#define HAVE_avx2_movntdqa ((TARGET_SSE4_1) && (TARGET_AVX2))
#define HAVE_sse4_1_movntdqa (TARGET_SSE4_1)
#define HAVE_avx2_mpsadbw ((TARGET_SSE4_1) && (TARGET_AVX2))
#define HAVE_sse4_1_mpsadbw (TARGET_SSE4_1)
#define HAVE_avx512bw_packusdw ((TARGET_SSE4_1 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_packusdw_mask ((TARGET_AVX512F) && ((TARGET_SSE4_1 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx2_packusdw ((TARGET_SSE4_1 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_packusdw_mask ((TARGET_AVX512F) && ((TARGET_SSE4_1 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_sse4_1_packusdw (TARGET_SSE4_1 && 1 && 1)
#define HAVE_sse4_1_packusdw_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_avx2_pblendvb ((TARGET_SSE4_1) && (TARGET_AVX2))
#define HAVE_sse4_1_pblendvb (TARGET_SSE4_1)
#define HAVE_sse4_1_pblendw (TARGET_SSE4_1)
#define HAVE_avx2_pblenddv8si (TARGET_AVX2)
#define HAVE_avx2_pblenddv4si (TARGET_AVX2)
#define HAVE_sse4_1_phminposuw (TARGET_SSE4_1)
#define HAVE_avx2_sign_extendv16qiv16hi2 (TARGET_AVX2 && 1 && 1)
#define HAVE_avx2_sign_extendv16qiv16hi2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512BW && TARGET_AVX512VL))
#define HAVE_avx2_zero_extendv16qiv16hi2 (TARGET_AVX2 && 1 && 1)
#define HAVE_avx2_zero_extendv16qiv16hi2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512BW && TARGET_AVX512VL))
#define HAVE_avx512bw_sign_extendv32qiv32hi2 (TARGET_AVX512BW)
#define HAVE_avx512bw_sign_extendv32qiv32hi2_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512bw_zero_extendv32qiv32hi2 (TARGET_AVX512BW)
#define HAVE_avx512bw_zero_extendv32qiv32hi2_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_sse4_1_sign_extendv8qiv8hi2 (TARGET_SSE4_1 && 1 && 1)
#define HAVE_sse4_1_sign_extendv8qiv8hi2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512BW && TARGET_AVX512VL))
#define HAVE_sse4_1_zero_extendv8qiv8hi2 (TARGET_SSE4_1 && 1 && 1)
#define HAVE_sse4_1_zero_extendv8qiv8hi2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512BW && TARGET_AVX512VL))
#define HAVE_avx512f_sign_extendv16qiv16si2_mask (TARGET_AVX512F)
#define HAVE_avx512f_zero_extendv16qiv16si2_mask (TARGET_AVX512F)
#define HAVE_avx2_sign_extendv8qiv8si2 (TARGET_AVX2 && 1)
#define HAVE_avx2_sign_extendv8qiv8si2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_avx2_zero_extendv8qiv8si2 (TARGET_AVX2 && 1)
#define HAVE_avx2_zero_extendv8qiv8si2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_sse4_1_sign_extendv4qiv4si2 (TARGET_SSE4_1 && 1)
#define HAVE_sse4_1_sign_extendv4qiv4si2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
#define HAVE_sse4_1_zero_extendv4qiv4si2 (TARGET_SSE4_1 && 1)
#define HAVE_sse4_1_zero_extendv4qiv4si2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
#define HAVE_avx512f_sign_extendv16hiv16si2 (TARGET_AVX512F)
#define HAVE_avx512f_sign_extendv16hiv16si2_mask (TARGET_AVX512F)
#define HAVE_avx512f_zero_extendv16hiv16si2 (TARGET_AVX512F)
#define HAVE_avx512f_zero_extendv16hiv16si2_mask (TARGET_AVX512F)
#define HAVE_avx2_sign_extendv8hiv8si2 (TARGET_AVX2 && 1)
#define HAVE_avx2_sign_extendv8hiv8si2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_avx2_zero_extendv8hiv8si2 (TARGET_AVX2 && 1)
#define HAVE_avx2_zero_extendv8hiv8si2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_sse4_1_sign_extendv4hiv4si2 (TARGET_SSE4_1 && 1)
#define HAVE_sse4_1_sign_extendv4hiv4si2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
#define HAVE_sse4_1_zero_extendv4hiv4si2 (TARGET_SSE4_1 && 1)
#define HAVE_sse4_1_zero_extendv4hiv4si2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
#define HAVE_avx512f_sign_extendv8qiv8di2 (TARGET_AVX512F)
#define HAVE_avx512f_sign_extendv8qiv8di2_mask (TARGET_AVX512F)
#define HAVE_avx512f_zero_extendv8qiv8di2 (TARGET_AVX512F)
#define HAVE_avx512f_zero_extendv8qiv8di2_mask (TARGET_AVX512F)
#define HAVE_avx2_sign_extendv4qiv4di2 (TARGET_AVX2 && 1)
#define HAVE_avx2_sign_extendv4qiv4di2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_avx2_zero_extendv4qiv4di2 (TARGET_AVX2 && 1)
#define HAVE_avx2_zero_extendv4qiv4di2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_sse4_1_sign_extendv2qiv2di2 (TARGET_SSE4_1 && 1)
#define HAVE_sse4_1_sign_extendv2qiv2di2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
#define HAVE_sse4_1_zero_extendv2qiv2di2 (TARGET_SSE4_1 && 1)
#define HAVE_sse4_1_zero_extendv2qiv2di2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
#define HAVE_avx512f_sign_extendv8hiv8di2 (TARGET_AVX512F)
#define HAVE_avx512f_sign_extendv8hiv8di2_mask (TARGET_AVX512F)
#define HAVE_avx512f_zero_extendv8hiv8di2 (TARGET_AVX512F)
#define HAVE_avx512f_zero_extendv8hiv8di2_mask (TARGET_AVX512F)
#define HAVE_avx2_sign_extendv4hiv4di2 (TARGET_AVX2 && 1)
#define HAVE_avx2_sign_extendv4hiv4di2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_avx2_zero_extendv4hiv4di2 (TARGET_AVX2 && 1)
#define HAVE_avx2_zero_extendv4hiv4di2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_sse4_1_sign_extendv2hiv2di2 (TARGET_SSE4_1 && 1)
#define HAVE_sse4_1_sign_extendv2hiv2di2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
#define HAVE_sse4_1_zero_extendv2hiv2di2 (TARGET_SSE4_1 && 1)
#define HAVE_sse4_1_zero_extendv2hiv2di2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
#define HAVE_avx512f_sign_extendv8siv8di2 (TARGET_AVX512F)
#define HAVE_avx512f_sign_extendv8siv8di2_mask (TARGET_AVX512F)
#define HAVE_avx512f_zero_extendv8siv8di2 (TARGET_AVX512F)
#define HAVE_avx512f_zero_extendv8siv8di2_mask (TARGET_AVX512F)
#define HAVE_avx2_sign_extendv4siv4di2 (TARGET_AVX2 && 1)
#define HAVE_avx2_sign_extendv4siv4di2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_avx2_zero_extendv4siv4di2 (TARGET_AVX2 && 1)
#define HAVE_avx2_zero_extendv4siv4di2_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_sse4_1_sign_extendv2siv2di2 (TARGET_SSE4_1 && 1)
#define HAVE_sse4_1_sign_extendv2siv2di2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
#define HAVE_sse4_1_zero_extendv2siv2di2 (TARGET_SSE4_1 && 1)
#define HAVE_sse4_1_zero_extendv2siv2di2_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
#define HAVE_avx_vtestps256 (TARGET_AVX)
#define HAVE_avx_vtestps (TARGET_AVX)
#define HAVE_avx_vtestpd256 (TARGET_AVX)
#define HAVE_avx_vtestpd ((TARGET_AVX) && (TARGET_SSE2))
#define HAVE_sse4_1_ptestv16qi (TARGET_SSE4_1)
#define HAVE_sse4_1_ptestv8hi (TARGET_SSE4_1)
#define HAVE_sse4_1_ptestv4si (TARGET_SSE4_1)
#define HAVE_sse4_1_ptestv2di (TARGET_SSE4_1)
#define HAVE_sse4_1_ptestv4sf (TARGET_SSE4_1)
#define HAVE_sse4_1_ptestv2df (TARGET_SSE4_1)
#define HAVE_avx_ptestv32qi ((TARGET_SSE4_1) && (TARGET_AVX))
#define HAVE_avx_ptestv16hi ((TARGET_SSE4_1) && (TARGET_AVX))
#define HAVE_avx_ptestv8si ((TARGET_SSE4_1) && (TARGET_AVX))
#define HAVE_avx_ptestv4di ((TARGET_SSE4_1) && (TARGET_AVX))
#define HAVE_avx_ptestv8sf ((TARGET_SSE4_1) && (TARGET_AVX))
#define HAVE_avx_ptestv4df ((TARGET_SSE4_1) && (TARGET_AVX))
#define HAVE_avx_roundps256 ((TARGET_ROUND) && (TARGET_AVX))
#define HAVE_sse4_1_roundps (TARGET_ROUND)
#define HAVE_avx_roundpd256 ((TARGET_ROUND) && (TARGET_AVX))
#define HAVE_sse4_1_roundpd ((TARGET_ROUND) && (TARGET_SSE2))
#define HAVE_sse4_1_roundss (TARGET_ROUND)
#define HAVE_sse4_1_roundsd ((TARGET_ROUND) && (TARGET_SSE2))
#define HAVE_sse4_2_pcmpestr (TARGET_SSE4_2 \
&& can_create_pseudo_p ())
#define HAVE_sse4_2_pcmpestri (TARGET_SSE4_2)
#define HAVE_sse4_2_pcmpestrm (TARGET_SSE4_2)
#define HAVE_sse4_2_pcmpestr_cconly (TARGET_SSE4_2)
#define HAVE_sse4_2_pcmpistr (TARGET_SSE4_2 \
&& can_create_pseudo_p ())
#define HAVE_sse4_2_pcmpistri (TARGET_SSE4_2)
#define HAVE_sse4_2_pcmpistrm (TARGET_SSE4_2)
#define HAVE_sse4_2_pcmpistr_cconly (TARGET_SSE4_2)
#define HAVE_avx512er_exp2v16sf (TARGET_AVX512ER)
#define HAVE_avx512er_exp2v16sf_round ((TARGET_AVX512F) && (TARGET_AVX512ER))
#define HAVE_avx512er_exp2v16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512ER))
#define HAVE_avx512er_exp2v16sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512ER)))
#define HAVE_avx512er_exp2v8df (TARGET_AVX512ER)
#define HAVE_avx512er_exp2v8df_round ((TARGET_AVX512F) && (TARGET_AVX512ER))
#define HAVE_avx512er_exp2v8df_mask ((TARGET_AVX512F) && (TARGET_AVX512ER))
#define HAVE_avx512er_exp2v8df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512ER)))
#define HAVE_avx512er_rcp28v16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512ER))
#define HAVE_avx512er_rcp28v16sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512ER)))
#define HAVE_avx512er_rcp28v8df_mask ((TARGET_AVX512F) && (TARGET_AVX512ER))
#define HAVE_avx512er_rcp28v8df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512ER)))
#define HAVE_avx512er_vmrcp28v4sf (TARGET_AVX512ER)
#define HAVE_avx512er_vmrcp28v4sf_round ((TARGET_AVX512F) && (TARGET_AVX512ER))
#define HAVE_avx512er_vmrcp28v2df ((TARGET_AVX512ER) && (TARGET_SSE2))
#define HAVE_avx512er_vmrcp28v2df_round ((TARGET_AVX512F) && ((TARGET_AVX512ER) && (TARGET_SSE2)))
#define HAVE_avx512er_rsqrt28v16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512ER))
#define HAVE_avx512er_rsqrt28v16sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512ER)))
#define HAVE_avx512er_rsqrt28v8df_mask ((TARGET_AVX512F) && (TARGET_AVX512ER))
#define HAVE_avx512er_rsqrt28v8df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512ER)))
#define HAVE_avx512er_vmrsqrt28v4sf (TARGET_AVX512ER)
#define HAVE_avx512er_vmrsqrt28v4sf_round ((TARGET_AVX512F) && (TARGET_AVX512ER))
#define HAVE_avx512er_vmrsqrt28v2df ((TARGET_AVX512ER) && (TARGET_SSE2))
#define HAVE_avx512er_vmrsqrt28v2df_round ((TARGET_AVX512F) && ((TARGET_AVX512ER) && (TARGET_SSE2)))
#define HAVE_xop_pmacsww (TARGET_XOP)
#define HAVE_xop_pmacssww (TARGET_XOP)
#define HAVE_xop_pmacsdd (TARGET_XOP)
#define HAVE_xop_pmacssdd (TARGET_XOP)
#define HAVE_xop_pmacsdql (TARGET_XOP)
#define HAVE_xop_pmacssdql (TARGET_XOP)
#define HAVE_xop_pmacsdqh (TARGET_XOP)
#define HAVE_xop_pmacssdqh (TARGET_XOP)
#define HAVE_xop_pmacswd (TARGET_XOP)
#define HAVE_xop_pmacsswd (TARGET_XOP)
#define HAVE_xop_pmadcswd (TARGET_XOP)
#define HAVE_xop_pmadcsswd (TARGET_XOP)
#define HAVE_xop_pcmov_v32qi256 ((TARGET_XOP) && (TARGET_AVX))
#define HAVE_xop_pcmov_v16qi (TARGET_XOP)
#define HAVE_xop_pcmov_v16hi256 ((TARGET_XOP) && (TARGET_AVX))
#define HAVE_xop_pcmov_v8hi (TARGET_XOP)
#define HAVE_xop_pcmov_v16si512 ((TARGET_XOP) && (TARGET_AVX512F))
#define HAVE_xop_pcmov_v8si256 ((TARGET_XOP) && (TARGET_AVX))
#define HAVE_xop_pcmov_v4si (TARGET_XOP)
#define HAVE_xop_pcmov_v8di512 ((TARGET_XOP) && (TARGET_AVX512F))
#define HAVE_xop_pcmov_v4di256 ((TARGET_XOP) && (TARGET_AVX))
#define HAVE_xop_pcmov_v2di (TARGET_XOP)
#define HAVE_xop_pcmov_v16sf512 ((TARGET_XOP) && (TARGET_AVX512F))
#define HAVE_xop_pcmov_v8sf256 ((TARGET_XOP) && (TARGET_AVX))
#define HAVE_xop_pcmov_v4sf (TARGET_XOP)
#define HAVE_xop_pcmov_v8df512 ((TARGET_XOP) && (TARGET_AVX512F))
#define HAVE_xop_pcmov_v4df256 ((TARGET_XOP) && (TARGET_AVX))
#define HAVE_xop_pcmov_v2df ((TARGET_XOP) && (TARGET_SSE2))
#define HAVE_xop_phaddbw (TARGET_XOP)
#define HAVE_xop_phaddubw (TARGET_XOP)
#define HAVE_xop_phaddbd (TARGET_XOP)
#define HAVE_xop_phaddubd (TARGET_XOP)
#define HAVE_xop_phaddbq (TARGET_XOP)
#define HAVE_xop_phaddubq (TARGET_XOP)
#define HAVE_xop_phaddwd (TARGET_XOP)
#define HAVE_xop_phadduwd (TARGET_XOP)
#define HAVE_xop_phaddwq (TARGET_XOP)
#define HAVE_xop_phadduwq (TARGET_XOP)
#define HAVE_xop_phadddq (TARGET_XOP)
#define HAVE_xop_phaddudq (TARGET_XOP)
#define HAVE_xop_phsubbw (TARGET_XOP)
#define HAVE_xop_phsubwd (TARGET_XOP)
#define HAVE_xop_phsubdq (TARGET_XOP)
#define HAVE_xop_pperm (TARGET_XOP && !(MEM_P (operands[2]) && MEM_P (operands[3])))
#define HAVE_xop_pperm_pack_v2di_v4si (TARGET_XOP && !(MEM_P (operands[2]) && MEM_P (operands[3])))
#define HAVE_xop_pperm_pack_v4si_v8hi (TARGET_XOP && !(MEM_P (operands[2]) && MEM_P (operands[3])))
#define HAVE_xop_pperm_pack_v8hi_v16qi (TARGET_XOP && !(MEM_P (operands[2]) && MEM_P (operands[3])))
#define HAVE_xop_rotlv16qi3 (TARGET_XOP)
#define HAVE_xop_rotlv8hi3 (TARGET_XOP)
#define HAVE_xop_rotlv4si3 (TARGET_XOP)
#define HAVE_xop_rotlv2di3 (TARGET_XOP)
#define HAVE_xop_rotrv16qi3 (TARGET_XOP)
#define HAVE_xop_rotrv8hi3 (TARGET_XOP)
#define HAVE_xop_rotrv4si3 (TARGET_XOP)
#define HAVE_xop_rotrv2di3 (TARGET_XOP)
#define HAVE_xop_vrotlv16qi3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
#define HAVE_xop_vrotlv8hi3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
#define HAVE_xop_vrotlv4si3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
#define HAVE_xop_vrotlv2di3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
#define HAVE_xop_shav16qi3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
#define HAVE_xop_shav8hi3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
#define HAVE_xop_shav4si3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
#define HAVE_xop_shav2di3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
#define HAVE_xop_shlv16qi3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
#define HAVE_xop_shlv8hi3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
#define HAVE_xop_shlv4si3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
#define HAVE_xop_shlv2di3 (TARGET_XOP && !(MEM_P (operands[1]) && MEM_P (operands[2])))
#define HAVE_xop_frczsf2 (TARGET_XOP)
#define HAVE_xop_frczdf2 (TARGET_XOP)
#define HAVE_xop_frczv4sf2 (TARGET_XOP)
#define HAVE_xop_frczv2df2 (TARGET_XOP)
#define HAVE_xop_frczv8sf2 (TARGET_XOP)
#define HAVE_xop_frczv4df2 (TARGET_XOP)
#define HAVE_xop_maskcmpv16qi3 (TARGET_XOP)
#define HAVE_xop_maskcmpv8hi3 (TARGET_XOP)
#define HAVE_xop_maskcmpv4si3 (TARGET_XOP)
#define HAVE_xop_maskcmpv2di3 (TARGET_XOP)
#define HAVE_xop_maskcmp_unsv16qi3 (TARGET_XOP)
#define HAVE_xop_maskcmp_unsv8hi3 (TARGET_XOP)
#define HAVE_xop_maskcmp_unsv4si3 (TARGET_XOP)
#define HAVE_xop_maskcmp_unsv2di3 (TARGET_XOP)
#define HAVE_xop_maskcmp_uns2v16qi3 (TARGET_XOP)
#define HAVE_xop_maskcmp_uns2v8hi3 (TARGET_XOP)
#define HAVE_xop_maskcmp_uns2v4si3 (TARGET_XOP)
#define HAVE_xop_maskcmp_uns2v2di3 (TARGET_XOP)
#define HAVE_xop_pcom_tfv16qi3 (TARGET_XOP)
#define HAVE_xop_pcom_tfv8hi3 (TARGET_XOP)
#define HAVE_xop_pcom_tfv4si3 (TARGET_XOP)
#define HAVE_xop_pcom_tfv2di3 (TARGET_XOP)
#define HAVE_xop_vpermil2v8sf3 ((TARGET_XOP) && (TARGET_AVX))
#define HAVE_xop_vpermil2v4sf3 (TARGET_XOP)
#define HAVE_xop_vpermil2v4df3 ((TARGET_XOP) && (TARGET_AVX))
#define HAVE_xop_vpermil2v2df3 ((TARGET_XOP) && (TARGET_SSE2))
#define HAVE_aesenc (TARGET_AES)
#define HAVE_aesenclast (TARGET_AES)
#define HAVE_aesdec (TARGET_AES)
#define HAVE_aesdeclast (TARGET_AES)
#define HAVE_aesimc (TARGET_AES)
#define HAVE_aeskeygenassist (TARGET_AES)
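/* Conditions of the doubly parenthesized form ((TARGET_A) && (TARGET_B))
   compose the base pattern's requirement with the one added by the
   iterator instance that produced this variant.  Subterms such as
   "&& 1" or "(64 == 64 || TARGET_AVX512VL)" appear to be mode-iterator
   attributes expanded to constants by the generator; the compiler folds
   them away, leaving only the genuine target-flag tests.  */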
#define HAVE_pclmulqdq (TARGET_PCLMUL)
#define HAVE_avx_vzeroupper (TARGET_AVX)
#define HAVE_avx2_pbroadcastv16si ((TARGET_AVX2) && (TARGET_AVX512F))
#define HAVE_avx2_pbroadcastv8di ((TARGET_AVX2) && (TARGET_AVX512F))
#define HAVE_avx2_pbroadcastv64qi ((TARGET_AVX2) && (TARGET_AVX512BW))
#define HAVE_avx2_pbroadcastv32qi ((TARGET_AVX2) && (TARGET_AVX))
#define HAVE_avx2_pbroadcastv16qi (TARGET_AVX2)
#define HAVE_avx2_pbroadcastv32hi ((TARGET_AVX2) && (TARGET_AVX512BW))
#define HAVE_avx2_pbroadcastv16hi ((TARGET_AVX2) && (TARGET_AVX))
#define HAVE_avx2_pbroadcastv8hi (TARGET_AVX2)
#define HAVE_avx2_pbroadcastv8si ((TARGET_AVX2) && (TARGET_AVX))
#define HAVE_avx2_pbroadcastv4si (TARGET_AVX2)
#define HAVE_avx2_pbroadcastv4di ((TARGET_AVX2) && (TARGET_AVX))
#define HAVE_avx2_pbroadcastv2di (TARGET_AVX2)
#define HAVE_avx2_pbroadcastv32qi_1 (TARGET_AVX2)
#define HAVE_avx2_pbroadcastv16hi_1 (TARGET_AVX2)
#define HAVE_avx2_pbroadcastv8si_1 (TARGET_AVX2)
#define HAVE_avx2_pbroadcastv4di_1 (TARGET_AVX2)
#define HAVE_avx2_permvarv8si (TARGET_AVX2 && 1)
#define HAVE_avx2_permvarv8si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
#define HAVE_avx2_permvarv8sf (TARGET_AVX2 && 1)
#define HAVE_avx2_permvarv8sf_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
#define HAVE_avx512f_permvarv16si ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_permvarv16si_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx512f_permvarv16sf ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_permvarv16sf_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx512f_permvarv8di ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_permvarv8di_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx512f_permvarv8df ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_permvarv8df_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx2_permvarv4di ((TARGET_AVX2 && 1) && (TARGET_AVX512VL))
#define HAVE_avx2_permvarv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512VL)))
#define HAVE_avx2_permvarv4df ((TARGET_AVX2 && 1) && (TARGET_AVX512VL))
#define HAVE_avx2_permvarv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_permvarv64qi (TARGET_AVX512VBMI && 1)
#define HAVE_avx512bw_permvarv64qi_mask ((TARGET_AVX512F) && (TARGET_AVX512VBMI && (64 == 64 || TARGET_AVX512VL)))
#define HAVE_avx512vl_permvarv16qi ((TARGET_AVX512VBMI && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_permvarv16qi_mask ((TARGET_AVX512F) && ((TARGET_AVX512VBMI && (16 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_permvarv32qi ((TARGET_AVX512VBMI && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_permvarv32qi_mask ((TARGET_AVX512F) && ((TARGET_AVX512VBMI && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_permvarv8hi ((TARGET_AVX512BW && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_permvarv8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW && (16 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_permvarv16hi ((TARGET_AVX512BW && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_permvarv16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_permvarv32hi (TARGET_AVX512BW && 1)
#define HAVE_avx512bw_permvarv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW && (64 == 64 || TARGET_AVX512VL)))
#define HAVE_avx2_permv4di_1 (TARGET_AVX2 && 1)
#define HAVE_avx2_permv4di_1_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
#define HAVE_avx2_permv4df_1 (TARGET_AVX2 && 1)
#define HAVE_avx2_permv4df_1_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
#define HAVE_avx512f_permv8di_1 ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_permv8di_1_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx512f_permv8df_1 ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_permv8df_1_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx2_permv2ti (TARGET_AVX2)
#define HAVE_avx2_vec_dupv4df (TARGET_AVX2)
#define HAVE_avx512f_vec_dupv16si_1 (TARGET_AVX512F)
#define HAVE_avx512f_vec_dupv8di_1 (TARGET_AVX512F)
#define HAVE_avx512bw_vec_dupv32hi_1 ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512bw_vec_dupv64qi_1 ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512f_vec_dupv16si (TARGET_AVX512F)
#define HAVE_avx512f_vec_dupv16si_mask (TARGET_AVX512F)
#define HAVE_avx512vl_vec_dupv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vec_dupv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_vec_dupv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vec_dupv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_vec_dupv8di (TARGET_AVX512F)
#define HAVE_avx512f_vec_dupv8di_mask (TARGET_AVX512F)
#define HAVE_avx512vl_vec_dupv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vec_dupv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_vec_dupv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vec_dupv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_vec_dupv16sf (TARGET_AVX512F)
#define HAVE_avx512f_vec_dupv16sf_mask (TARGET_AVX512F)
#define HAVE_avx512vl_vec_dupv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vec_dupv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_vec_dupv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vec_dupv4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_vec_dupv8df (TARGET_AVX512F)
#define HAVE_avx512f_vec_dupv8df_mask (TARGET_AVX512F)
#define HAVE_avx512vl_vec_dupv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vec_dupv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_vec_dupv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vec_dupv2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_vec_dupv64qi (TARGET_AVX512BW)
#define HAVE_avx512bw_vec_dupv64qi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_vec_dupv16qi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vec_dupv16qi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_vec_dupv32qi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vec_dupv32qi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_vec_dupv32hi (TARGET_AVX512BW)
#define HAVE_avx512bw_vec_dupv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_vec_dupv16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vec_dupv16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_vec_dupv8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vec_dupv8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512f_broadcastv16sf_mask (TARGET_AVX512F)
#define HAVE_avx512f_broadcastv16si_mask (TARGET_AVX512F)
#define HAVE_avx512f_broadcastv8df_mask (TARGET_AVX512F)
#define HAVE_avx512f_broadcastv8di_mask (TARGET_AVX512F)
#define HAVE_avx512bw_vec_dup_gprv64qi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_vec_dup_gprv16qi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_vec_dup_gprv32qi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_vec_dup_gprv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_vec_dup_gprv16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_vec_dup_gprv8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512f_vec_dup_gprv16si_mask (TARGET_AVX512F)
#define HAVE_avx512vl_vec_dup_gprv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_vec_dup_gprv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_vec_dup_gprv8di_mask (TARGET_AVX512F)
#define HAVE_avx512vl_vec_dup_gprv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_vec_dup_gprv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_vec_dup_gprv16sf_mask (TARGET_AVX512F)
#define HAVE_avx512vl_vec_dup_gprv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_vec_dup_gprv4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_vec_dup_gprv8df_mask (TARGET_AVX512F)
#define HAVE_avx512vl_vec_dup_gprv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_vec_dup_gprv2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_vec_dupv4sf (TARGET_SSE)
#define HAVE_avx2_vbroadcasti128_v32qi (TARGET_AVX2)
#define HAVE_avx2_vbroadcasti128_v16hi (TARGET_AVX2)
#define HAVE_avx2_vbroadcasti128_v8si (TARGET_AVX2)
#define HAVE_avx2_vbroadcasti128_v4di (TARGET_AVX2)
#define HAVE_vec_dupv8si (TARGET_AVX)
#define HAVE_vec_dupv8sf (TARGET_AVX)
#define HAVE_vec_dupv4di (TARGET_AVX)
#define HAVE_vec_dupv4df (TARGET_AVX)
#define HAVE_avx_vbroadcastf128_v32qi (TARGET_AVX)
#define HAVE_avx_vbroadcastf128_v16hi (TARGET_AVX)
#define HAVE_avx_vbroadcastf128_v8si (TARGET_AVX)
#define HAVE_avx_vbroadcastf128_v4di (TARGET_AVX)
#define HAVE_avx_vbroadcastf128_v8sf (TARGET_AVX)
#define HAVE_avx_vbroadcastf128_v4df (TARGET_AVX)
#define HAVE_avx512dq_broadcastv16si_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_broadcastv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_avx512dq_broadcastv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_avx512dq_broadcastv16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_broadcastv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_broadcastv8si_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_broadcastv8sf_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512dq_broadcastv16sf_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_broadcastv16si_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_broadcastv8di_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_broadcastv8df_mask_1 ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_broadcastv4di_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_avx512dq_broadcastv4df_mask_1 ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_avx512cd_maskb_vec_dupv8di (TARGET_AVX512CD)
#define HAVE_avx512cd_maskb_vec_dupv4di ((TARGET_AVX512CD) && (TARGET_AVX512VL))
#define HAVE_avx512cd_maskb_vec_dupv2di ((TARGET_AVX512CD) && (TARGET_AVX512VL))
#define HAVE_avx512cd_maskw_vec_dupv16si (TARGET_AVX512CD)
#define HAVE_avx512cd_maskw_vec_dupv8si ((TARGET_AVX512CD) && (TARGET_AVX512VL))
#define HAVE_avx512cd_maskw_vec_dupv4si ((TARGET_AVX512CD) && (TARGET_AVX512VL))
#define HAVE_avx512f_vpermilvarv16sf3 ((TARGET_AVX && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_vpermilvarv16sf3_mask ((TARGET_AVX512F) && ((TARGET_AVX && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx_vpermilvarv8sf3 ((TARGET_AVX && 1) && (TARGET_AVX))
#define HAVE_avx_vpermilvarv8sf3_mask ((TARGET_AVX512F) && ((TARGET_AVX && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX)))
#define HAVE_avx_vpermilvarv4sf3 (TARGET_AVX && 1)
#define HAVE_avx_vpermilvarv4sf3_mask ((TARGET_AVX512F) && (TARGET_AVX && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_avx512f_vpermilvarv8df3 ((TARGET_AVX && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_vpermilvarv8df3_mask ((TARGET_AVX512F) && ((TARGET_AVX && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx_vpermilvarv4df3 ((TARGET_AVX && 1) && (TARGET_AVX))
#define HAVE_avx_vpermilvarv4df3_mask ((TARGET_AVX512F) && ((TARGET_AVX && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX)))
#define HAVE_avx_vpermilvarv2df3 ((TARGET_AVX && 1) && (TARGET_SSE2))
#define HAVE_avx_vpermilvarv2df3_mask ((TARGET_AVX512F) && ((TARGET_AVX && (16 == 64 || TARGET_AVX512VL)) && (TARGET_SSE2)))
#define HAVE_avx512f_vpermi2varv16si3 (TARGET_AVX512F)
#define HAVE_avx512f_vpermi2varv16si3_maskz_1 (TARGET_AVX512F)
#define HAVE_avx512f_vpermi2varv16sf3 (TARGET_AVX512F)
#define HAVE_avx512f_vpermi2varv16sf3_maskz_1 (TARGET_AVX512F)
#define HAVE_avx512f_vpermi2varv8di3 (TARGET_AVX512F)
#define HAVE_avx512f_vpermi2varv8di3_maskz_1 (TARGET_AVX512F)
#define HAVE_avx512f_vpermi2varv8df3 (TARGET_AVX512F)
#define HAVE_avx512f_vpermi2varv8df3_maskz_1 (TARGET_AVX512F)
#define HAVE_avx512vl_vpermi2varv8si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv8si3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv8sf3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv8sf3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4di3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4df3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4df3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4si3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4sf3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4sf3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv2di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv2di3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv2df3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv2df3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_vpermi2varv64qi3 (TARGET_AVX512VBMI)
#define HAVE_avx512bw_vpermi2varv64qi3_maskz_1 (TARGET_AVX512VBMI)
#define HAVE_avx512vl_vpermi2varv16qi3 ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv16qi3_maskz_1 ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv32qi3 ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv32qi3_maskz_1 ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv8hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv8hi3_maskz_1 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv16hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv16hi3_maskz_1 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_vpermi2varv32hi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_vpermi2varv32hi3_maskz_1 (TARGET_AVX512BW)
#define HAVE_avx512f_vpermi2varv16si3_mask (TARGET_AVX512F)
#define HAVE_avx512f_vpermi2varv16sf3_mask (TARGET_AVX512F)
#define HAVE_avx512f_vpermi2varv8di3_mask (TARGET_AVX512F)
#define HAVE_avx512f_vpermi2varv8df3_mask (TARGET_AVX512F)
#define HAVE_avx512vl_vpermi2varv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv8sf3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4df3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4sf3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv2df3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_vpermi2varv64qi3_mask (TARGET_AVX512VBMI)
#define HAVE_avx512vl_vpermi2varv16qi3_mask ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv32qi3_mask ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv8hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv16hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_vpermi2varv32hi3_mask (TARGET_AVX512BW)
#define HAVE_avx512f_vpermt2varv16si3 (TARGET_AVX512F)
#define HAVE_avx512f_vpermt2varv16si3_maskz_1 (TARGET_AVX512F)
#define HAVE_avx512f_vpermt2varv16sf3 (TARGET_AVX512F)
#define HAVE_avx512f_vpermt2varv16sf3_maskz_1 (TARGET_AVX512F)
#define HAVE_avx512f_vpermt2varv8di3 (TARGET_AVX512F)
#define HAVE_avx512f_vpermt2varv8di3_maskz_1 (TARGET_AVX512F)
#define HAVE_avx512f_vpermt2varv8df3 (TARGET_AVX512F)
#define HAVE_avx512f_vpermt2varv8df3_maskz_1 (TARGET_AVX512F)
#define HAVE_avx512vl_vpermt2varv8si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv8si3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv8sf3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv8sf3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4di3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4df3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4df3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4si3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4sf3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4sf3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv2di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv2di3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv2df3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv2df3_maskz_1 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_vpermt2varv64qi3 (TARGET_AVX512VBMI)
#define HAVE_avx512bw_vpermt2varv64qi3_maskz_1 (TARGET_AVX512VBMI)
#define HAVE_avx512vl_vpermt2varv16qi3 ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv16qi3_maskz_1 ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv32qi3 ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv32qi3_maskz_1 ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv8hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv8hi3_maskz_1 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv16hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv16hi3_maskz_1 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_vpermt2varv32hi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_vpermt2varv32hi3_maskz_1 (TARGET_AVX512BW)
#define HAVE_avx512f_vpermt2varv16si3_mask (TARGET_AVX512F)
#define HAVE_avx512f_vpermt2varv16sf3_mask (TARGET_AVX512F)
#define HAVE_avx512f_vpermt2varv8di3_mask (TARGET_AVX512F)
#define HAVE_avx512f_vpermt2varv8df3_mask (TARGET_AVX512F)
#define HAVE_avx512vl_vpermt2varv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv8sf3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4df3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4sf3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv2df3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_vpermt2varv64qi3_mask (TARGET_AVX512VBMI)
#define HAVE_avx512vl_vpermt2varv16qi3_mask ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv32qi3_mask ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv8hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv16hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_vpermt2varv32hi3_mask (TARGET_AVX512BW)
#define HAVE_vec_set_lo_v4di (TARGET_AVX)
#define HAVE_vec_set_lo_v4di_mask ((TARGET_AVX512F) && (TARGET_AVX))
#define HAVE_vec_set_lo_v4df (TARGET_AVX)
#define HAVE_vec_set_lo_v4df_mask ((TARGET_AVX512F) && (TARGET_AVX))
#define HAVE_vec_set_hi_v4di (TARGET_AVX)
#define HAVE_vec_set_hi_v4di_mask ((TARGET_AVX512F) && (TARGET_AVX))
#define HAVE_vec_set_hi_v4df (TARGET_AVX)
#define HAVE_vec_set_hi_v4df_mask ((TARGET_AVX512F) && (TARGET_AVX))
#define HAVE_vec_set_lo_v8si (TARGET_AVX)
#define HAVE_vec_set_lo_v8si_mask ((TARGET_AVX512F) && (TARGET_AVX))
#define HAVE_vec_set_lo_v8sf (TARGET_AVX)
#define HAVE_vec_set_lo_v8sf_mask ((TARGET_AVX512F) && (TARGET_AVX))
#define HAVE_vec_set_hi_v8si (TARGET_AVX)
#define HAVE_vec_set_hi_v8si_mask ((TARGET_AVX512F) && (TARGET_AVX))
#define HAVE_vec_set_hi_v8sf (TARGET_AVX)
#define HAVE_vec_set_hi_v8sf_mask ((TARGET_AVX512F) && (TARGET_AVX))
#define HAVE_vec_set_lo_v16hi (TARGET_AVX)
#define HAVE_vec_set_hi_v16hi (TARGET_AVX)
#define HAVE_vec_set_lo_v32qi (TARGET_AVX)
#define HAVE_vec_set_hi_v32qi (TARGET_AVX)
#define HAVE_avx_maskloadps (TARGET_AVX)
#define HAVE_avx_maskloadpd (TARGET_AVX)
#define HAVE_avx_maskloadps256 (TARGET_AVX)
#define HAVE_avx_maskloadpd256 (TARGET_AVX)
#define HAVE_avx2_maskloadd ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_avx2_maskloadq ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_avx2_maskloadd256 ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_avx2_maskloadq256 ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_avx_maskstoreps (TARGET_AVX)
#define HAVE_avx_maskstorepd (TARGET_AVX)
#define HAVE_avx_maskstoreps256 (TARGET_AVX)
#define HAVE_avx_maskstorepd256 (TARGET_AVX)
#define HAVE_avx2_maskstored ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_avx2_maskstoreq ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_avx2_maskstored256 ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_avx2_maskstoreq256 ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_avx_si256_si (TARGET_AVX)
#define HAVE_avx_ps256_ps (TARGET_AVX)
#define HAVE_avx_pd256_pd (TARGET_AVX)
#define HAVE_avx2_ashrvv4si (TARGET_AVX2 && 1)
#define HAVE_avx2_ashrvv4si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_avx2_ashrvv8si (TARGET_AVX2 && 1)
#define HAVE_avx2_ashrvv8si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
#define HAVE_avx512f_ashrvv16si ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_ashrvv16si_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx2_ashrvv2di ((TARGET_AVX2 && 1) && (TARGET_AVX512VL))
#define HAVE_avx2_ashrvv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (16 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512VL)))
#define HAVE_avx2_ashrvv4di ((TARGET_AVX2 && 1) && (TARGET_AVX512VL))
#define HAVE_avx2_ashrvv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512VL)))
#define HAVE_avx512f_ashrvv8di ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_ashrvv8di_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx512vl_ashrvv8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ashrvv8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_ashrvv16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ashrvv16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_ashrvv32hi (TARGET_AVX512BW)
#define HAVE_avx512bw_ashrvv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512f_ashlvv16si ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_ashlvv16si_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx512f_lshrvv16si ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_lshrvv16si_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx2_ashlvv8si (TARGET_AVX2 && 1)
#define HAVE_avx2_ashlvv8si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
#define HAVE_avx2_lshrvv8si (TARGET_AVX2 && 1)
#define HAVE_avx2_lshrvv8si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
#define HAVE_avx2_ashlvv4si (TARGET_AVX2 && 1)
#define HAVE_avx2_ashlvv4si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_avx2_lshrvv4si (TARGET_AVX2 && 1)
#define HAVE_avx2_lshrvv4si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_avx512f_ashlvv8di ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_ashlvv8di_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx512f_lshrvv8di ((TARGET_AVX2 && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_lshrvv8di_mask ((TARGET_AVX512F) && ((TARGET_AVX2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx2_ashlvv4di (TARGET_AVX2 && 1)
#define HAVE_avx2_ashlvv4di_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
#define HAVE_avx2_lshrvv4di (TARGET_AVX2 && 1)
#define HAVE_avx2_lshrvv4di_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (32 == 64 || TARGET_AVX512VL)))
#define HAVE_avx2_ashlvv2di (TARGET_AVX2 && 1)
#define HAVE_avx2_ashlvv2di_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_avx2_lshrvv2di (TARGET_AVX2 && 1)
#define HAVE_avx2_lshrvv2di_mask ((TARGET_AVX512F) && (TARGET_AVX2 && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_avx512vl_ashlvv8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ashlvv8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_lshrvv8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_lshrvv8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_ashlvv16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_ashlvv16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_lshrvv16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_lshrvv16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_ashlvv32hi (TARGET_AVX512BW)
#define HAVE_avx512bw_ashlvv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512bw_lshrvv32hi (TARGET_AVX512BW)
#define HAVE_avx512bw_lshrvv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx_vec_concatv32qi (TARGET_AVX)
#define HAVE_avx_vec_concatv16hi (TARGET_AVX)
#define HAVE_avx_vec_concatv8si (TARGET_AVX)
#define HAVE_avx_vec_concatv4di (TARGET_AVX)
#define HAVE_avx_vec_concatv8sf (TARGET_AVX)
#define HAVE_avx_vec_concatv4df (TARGET_AVX)
#define HAVE_avx_vec_concatv64qi ((TARGET_AVX) && (TARGET_AVX512F))
#define HAVE_avx_vec_concatv32hi ((TARGET_AVX) && (TARGET_AVX512F))
#define HAVE_avx_vec_concatv16si ((TARGET_AVX) && (TARGET_AVX512F))
#define HAVE_avx_vec_concatv8di ((TARGET_AVX) && (TARGET_AVX512F))
#define HAVE_avx_vec_concatv16sf ((TARGET_AVX) && (TARGET_AVX512F))
#define HAVE_avx_vec_concatv8df ((TARGET_AVX) && (TARGET_AVX512F))
#define HAVE_vcvtph2ps (TARGET_F16C || TARGET_AVX512VL)
#define HAVE_vcvtph2ps_mask ((TARGET_AVX512F) && (TARGET_F16C || TARGET_AVX512VL))
#define HAVE_vcvtph2ps256 (TARGET_F16C || TARGET_AVX512VL)
#define HAVE_vcvtph2ps256_mask ((TARGET_AVX512F) && (TARGET_F16C || TARGET_AVX512VL))
#define HAVE_avx512f_vcvtph2ps512_mask (TARGET_AVX512F)
#define HAVE_avx512f_vcvtph2ps512_mask_round (TARGET_AVX512F)
#define HAVE_vcvtps2ph256 (TARGET_F16C || TARGET_AVX512VL)
#define HAVE_vcvtps2ph256_mask ((TARGET_AVX512F) && (TARGET_F16C || TARGET_AVX512VL))
#define HAVE_avx512f_vcvtps2ph512_mask (TARGET_AVX512F)
#define HAVE_avx512f_compressv16si_mask (TARGET_AVX512F)
#define HAVE_avx512f_compressv16sf_mask (TARGET_AVX512F)
#define HAVE_avx512f_compressv8di_mask (TARGET_AVX512F)
#define HAVE_avx512f_compressv8df_mask (TARGET_AVX512F)
#define HAVE_avx512vl_compressv8si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_compressv8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_compressv4di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_compressv4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_compressv4si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_compressv4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_compressv2di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_compressv2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_compressstorev16si_mask (TARGET_AVX512F)
#define HAVE_avx512f_compressstorev16sf_mask (TARGET_AVX512F)
#define HAVE_avx512f_compressstorev8di_mask (TARGET_AVX512F)
#define HAVE_avx512f_compressstorev8df_mask (TARGET_AVX512F)
#define HAVE_avx512vl_compressstorev8si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_compressstorev8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_compressstorev4di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_compressstorev4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_compressstorev4si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_compressstorev4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_compressstorev2di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_compressstorev2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_expandv16si_mask (TARGET_AVX512F)
#define HAVE_avx512f_expandv16sf_mask (TARGET_AVX512F)
#define HAVE_avx512f_expandv8di_mask (TARGET_AVX512F)
#define HAVE_avx512f_expandv8df_mask (TARGET_AVX512F)
#define HAVE_avx512vl_expandv8si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv8sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv4di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv4df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv4si_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv4sf_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv2di_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv2df_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512dq_rangepv16sf (TARGET_AVX512DQ && 1)
#define HAVE_avx512dq_rangepv16sf_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)))
#define HAVE_avx512dq_rangepv16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_avx512dq_rangepv16sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode))))
#define HAVE_avx512dq_rangepv8sf ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_avx512dq_rangepv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512dq_rangepv4sf ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_avx512dq_rangepv4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512dq_rangepv8df (TARGET_AVX512DQ && 1)
#define HAVE_avx512dq_rangepv8df_round ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)))
#define HAVE_avx512dq_rangepv8df_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ && 1))
#define HAVE_avx512dq_rangepv8df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512DQ && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode))))
#define HAVE_avx512dq_rangepv4df ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_avx512dq_rangepv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512dq_rangepv2df ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL))
#define HAVE_avx512dq_rangepv2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512dq_rangesv4sf (TARGET_AVX512DQ)
#define HAVE_avx512dq_rangesv4sf_round ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_rangesv2df ((TARGET_AVX512DQ) && (TARGET_SSE2))
#define HAVE_avx512dq_rangesv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_SSE2)))
#define HAVE_avx512dq_fpclassv16sf (TARGET_AVX512DQ)
#define HAVE_avx512dq_fpclassv16sf_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_fpclassv8sf ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_avx512dq_fpclassv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_avx512dq_fpclassv4sf ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_avx512dq_fpclassv4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_avx512dq_fpclassv8df (TARGET_AVX512DQ)
#define HAVE_avx512dq_fpclassv8df_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_fpclassv4df ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_avx512dq_fpclassv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_avx512dq_fpclassv2df ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_avx512dq_fpclassv2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512DQ) && (TARGET_AVX512VL)))
#define HAVE_avx512dq_vmfpclassv4sf (TARGET_AVX512DQ)
#define HAVE_avx512dq_vmfpclassv2df ((TARGET_AVX512DQ) && (TARGET_SSE2))
#define HAVE_avx512f_getmantv16sf (TARGET_AVX512F)
#define HAVE_avx512f_getmantv16sf_round (TARGET_AVX512F)
#define HAVE_avx512f_getmantv16sf_mask (TARGET_AVX512F)
#define HAVE_avx512f_getmantv16sf_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_getmantv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_getmantv8sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_getmantv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_getmantv8sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512vl_getmantv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_getmantv4sf_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_getmantv4sf_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_getmantv4sf_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512f_getmantv8df (TARGET_AVX512F)
#define HAVE_avx512f_getmantv8df_round (TARGET_AVX512F)
#define HAVE_avx512f_getmantv8df_mask (TARGET_AVX512F)
#define HAVE_avx512f_getmantv8df_mask_round (TARGET_AVX512F)
#define HAVE_avx512vl_getmantv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_getmantv4df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_getmantv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_getmantv4df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512vl_getmantv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_getmantv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_getmantv2df_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_getmantv2df_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL))))
#define HAVE_avx512f_vgetmantv4sf (TARGET_AVX512F)
#define HAVE_avx512f_vgetmantv4sf_round (TARGET_AVX512F)
#define HAVE_avx512f_vgetmantv2df ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_vgetmantv2df_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
#define HAVE_avx512bw_dbpsadbwv8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_dbpsadbwv16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_dbpsadbwv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_clzv16si2 (TARGET_AVX512CD)
#define HAVE_clzv16si2_mask ((TARGET_AVX512F) && (TARGET_AVX512CD))
#define HAVE_clzv8si2 ((TARGET_AVX512CD) && (TARGET_AVX512VL))
#define HAVE_clzv8si2_mask ((TARGET_AVX512F) && ((TARGET_AVX512CD) && (TARGET_AVX512VL)))
#define HAVE_clzv4si2 ((TARGET_AVX512CD) && (TARGET_AVX512VL))
#define HAVE_clzv4si2_mask ((TARGET_AVX512F) && ((TARGET_AVX512CD) && (TARGET_AVX512VL)))
#define HAVE_clzv8di2 (TARGET_AVX512CD)
#define HAVE_clzv8di2_mask ((TARGET_AVX512F) && (TARGET_AVX512CD))
#define HAVE_clzv4di2 ((TARGET_AVX512CD) && (TARGET_AVX512VL))
#define HAVE_clzv4di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512CD) && (TARGET_AVX512VL)))
#define HAVE_clzv2di2 ((TARGET_AVX512CD) && (TARGET_AVX512VL))
#define HAVE_clzv2di2_mask ((TARGET_AVX512F) && ((TARGET_AVX512CD) && (TARGET_AVX512VL)))
#define HAVE_conflictv16si_mask ((TARGET_AVX512F) && (TARGET_AVX512CD))
#define HAVE_conflictv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512CD) && (TARGET_AVX512VL)))
#define HAVE_conflictv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512CD) && (TARGET_AVX512VL)))
#define HAVE_conflictv8di_mask ((TARGET_AVX512F) && (TARGET_AVX512CD))
#define HAVE_conflictv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512CD) && (TARGET_AVX512VL)))
#define HAVE_conflictv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512CD) && (TARGET_AVX512VL)))
#define HAVE_sha1msg1 (TARGET_SHA)
#define HAVE_sha1msg2 (TARGET_SHA)
#define HAVE_sha1nexte (TARGET_SHA)
#define HAVE_sha1rnds4 (TARGET_SHA)
#define HAVE_sha256msg1 (TARGET_SHA)
#define HAVE_sha256msg2 (TARGET_SHA)
#define HAVE_sha256rnds2 (TARGET_SHA)
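/* The sha1msg1 through sha256rnds2 patterns map one-to-one onto the
   SHA-NI instructions, gated only on TARGET_SHA (-msha), and are
   reached through the <immintrin.h> intrinsics, e.g. (user code, not
   part of this header):

     __m128i r = _mm_sha1rnds4_epu32 (a, b, 0);

   which expands through the sha1rnds4 pattern.  */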
#define HAVE_avx512f_si512_si (TARGET_AVX512F)
#define HAVE_avx512f_ps512_ps (TARGET_AVX512F)
#define HAVE_avx512f_pd512_pd (TARGET_AVX512F)
#define HAVE_avx512f_si512_256si (TARGET_AVX512F)
#define HAVE_avx512f_ps512_256ps (TARGET_AVX512F)
#define HAVE_avx512f_pd512_256pd (TARGET_AVX512F)
#define HAVE_vpamdd52luqv8di (TARGET_AVX512IFMA)
#define HAVE_vpamdd52luqv8di_maskz_1 (TARGET_AVX512IFMA)
#define HAVE_vpamdd52huqv8di (TARGET_AVX512IFMA)
#define HAVE_vpamdd52huqv8di_maskz_1 (TARGET_AVX512IFMA)
#define HAVE_vpamdd52luqv4di ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52luqv4di_maskz_1 ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52huqv4di ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52huqv4di_maskz_1 ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52luqv2di ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52luqv2di_maskz_1 ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52huqv2di ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52huqv2di_maskz_1 ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52luqv8di_mask (TARGET_AVX512IFMA)
#define HAVE_vpamdd52huqv8di_mask (TARGET_AVX512IFMA)
#define HAVE_vpamdd52luqv4di_mask ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52huqv4di_mask ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52luqv2di_mask ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52huqv2di_mask ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpmultishiftqbv64qi (TARGET_AVX512VBMI)
#define HAVE_vpmultishiftqbv64qi_mask ((TARGET_AVX512F) && (TARGET_AVX512VBMI))
#define HAVE_vpmultishiftqbv16qi ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_vpmultishiftqbv16qi_mask ((TARGET_AVX512F) && ((TARGET_AVX512VBMI) && (TARGET_AVX512VL)))
#define HAVE_vpmultishiftqbv32qi ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_vpmultishiftqbv32qi_mask ((TARGET_AVX512F) && ((TARGET_AVX512VBMI) && (TARGET_AVX512VL)))
#define HAVE_mfence_sse2 (TARGET_64BIT || TARGET_SSE2)
#define HAVE_mfence_nosse (!(TARGET_64BIT || TARGET_SSE2))
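/* Memory fences: mfence_sse2 emits the real MFENCE instruction, always
   available in 64-bit mode and with SSE2 otherwise; on pre-SSE2 32-bit
   targets mfence_nosse instead emits a serializing LOCK-prefixed
   operation on the stack.  */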
#define HAVE_atomic_loaddi_fpu (!TARGET_64BIT && (TARGET_80387 || TARGET_SSE))
#define HAVE_atomic_storeqi_1 1
#define HAVE_atomic_storehi_1 1
#define HAVE_atomic_storesi_1 1
#define HAVE_atomic_storedi_1 (TARGET_64BIT)
#define HAVE_atomic_storedi_fpu (!TARGET_64BIT && (TARGET_80387 || TARGET_SSE))
#define HAVE_loaddi_via_fpu (TARGET_80387)
#define HAVE_storedi_via_fpu (TARGET_80387)
#define HAVE_atomic_compare_and_swapdi_doubleword ((TARGET_CMPXCHG8B) && (!TARGET_64BIT))
#define HAVE_atomic_compare_and_swapti_doubleword ((TARGET_CMPXCHG16B) && (TARGET_64BIT))
#define HAVE_atomic_compare_and_swapqi_1 (TARGET_CMPXCHG)
#define HAVE_atomic_compare_and_swaphi_1 (TARGET_CMPXCHG)
#define HAVE_atomic_compare_and_swapsi_1 (TARGET_CMPXCHG)
#define HAVE_atomic_compare_and_swapdi_1 ((TARGET_CMPXCHG) && (TARGET_64BIT))
#define HAVE_atomic_fetch_addqi (TARGET_XADD)
#define HAVE_atomic_fetch_addhi (TARGET_XADD)
#define HAVE_atomic_fetch_addsi (TARGET_XADD)
#define HAVE_atomic_fetch_adddi ((TARGET_XADD) && (TARGET_64BIT))
#define HAVE_atomic_exchangeqi 1
#define HAVE_atomic_exchangehi 1
#define HAVE_atomic_exchangesi 1
#define HAVE_atomic_exchangedi (TARGET_64BIT)
#define HAVE_atomic_addqi 1
#define HAVE_atomic_addhi 1
#define HAVE_atomic_addsi 1
#define HAVE_atomic_adddi (TARGET_64BIT)
#define HAVE_atomic_subqi 1
#define HAVE_atomic_subhi 1
#define HAVE_atomic_subsi 1
#define HAVE_atomic_subdi (TARGET_64BIT)
#define HAVE_atomic_andqi 1
#define HAVE_atomic_orqi 1
#define HAVE_atomic_xorqi 1
#define HAVE_atomic_andhi 1
#define HAVE_atomic_orhi 1
#define HAVE_atomic_xorhi 1
#define HAVE_atomic_andsi 1
#define HAVE_atomic_orsi 1
#define HAVE_atomic_xorsi 1
#define HAVE_atomic_anddi (TARGET_64BIT)
#define HAVE_atomic_ordi (TARGET_64BIT)
#define HAVE_atomic_xordi (TARGET_64BIT)
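/* The atomic_* patterns above back the __atomic/__sync built-ins:
   compare-and-swap uses CMPXCHG (CMPXCHG8B/CMPXCHG16B for the
   doubleword widths), atomic_fetch_add* uses XADD (486 and later),
   atomic_exchange* uses XCHG, and the plain add/sub/and/or/xor forms
   use LOCK-prefixed read-modify-write instructions when the old value
   is not needed.  A minimal illustrative use (ordinary C):

     int counter;
     int old = __atomic_fetch_add (&counter, 1, __ATOMIC_SEQ_CST);

   which reaches atomic_fetch_addsi, i.e. lock xadd.  */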
#define HAVE_cbranchqi4 (TARGET_QIMODE_MATH)
#define HAVE_cbranchhi4 (TARGET_HIMODE_MATH)
#define HAVE_cbranchsi4 1
#define HAVE_cbranchdi4 1
#define HAVE_cbranchti4 (TARGET_64BIT)
#define HAVE_cstoreqi4 (TARGET_QIMODE_MATH)
#define HAVE_cstorehi4 (TARGET_HIMODE_MATH)
#define HAVE_cstoresi4 1
#define HAVE_cstoredi4 (TARGET_64BIT)
#define HAVE_cmpsi_1 1
#define HAVE_cmpdi_1 (TARGET_64BIT)
#define HAVE_cmpqi_ext_3 1
#define HAVE_cbranchxf4 (TARGET_80387)
#define HAVE_cstorexf4 (TARGET_80387)
#define HAVE_cbranchsf4 (TARGET_80387 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_cbranchdf4 (TARGET_80387 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_cstoresf4 (TARGET_80387 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_cstoredf4 (TARGET_80387 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_cbranchcc4 1
#define HAVE_cstorecc4 1
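/* cbranch<mode>4 and cstore<mode>4 are the standard named patterns for
   conditional branches and for materializing a comparison result in a
   register: plain C such as

     int lt = a < b;

   expands through cstoresi4 (cmp followed by setcc).  The QImode and
   HImode variants are gated on TARGET_QIMODE_MATH / TARGET_HIMODE_MATH,
   the tuning knobs that decide whether byte and word arithmetic is
   done directly in those modes.  */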
#define HAVE_reload_noff_store (TARGET_64BIT)
#define HAVE_reload_noff_load (TARGET_64BIT)
#define HAVE_movxi (TARGET_AVX512F)
#define HAVE_movoi (TARGET_AVX)
#define HAVE_movti (TARGET_64BIT || TARGET_SSE)
#define HAVE_movcdi 1
#define HAVE_movqi 1
#define HAVE_movhi 1
#define HAVE_movsi 1
#define HAVE_movdi 1
#define HAVE_movstrictqi 1
#define HAVE_movstricthi 1
#define HAVE_extvhi 1
#define HAVE_extvsi 1
#define HAVE_extzvhi 1
#define HAVE_extzvsi 1
#define HAVE_extzvdi (TARGET_64BIT)
#define HAVE_insvhi 1
#define HAVE_insvsi 1
#define HAVE_insvdi (TARGET_64BIT)
#define HAVE_movtf (TARGET_64BIT || TARGET_SSE)
#define HAVE_movsf 1
#define HAVE_movdf 1
#define HAVE_movxf 1
#define HAVE_zero_extendsidi2 1
#define HAVE_zero_extendqisi2 1
#define HAVE_zero_extendhisi2 1
#define HAVE_zero_extendqihi2 1
#define HAVE_extendsidi2 1
#define HAVE_extendsfdf2 (TARGET_80387 || (TARGET_SSE2 && TARGET_SSE_MATH))
#define HAVE_extendsfxf2 (TARGET_80387)
#define HAVE_extenddfxf2 (TARGET_80387)
#define HAVE_truncdfsf2 (TARGET_80387 || (TARGET_SSE2 && TARGET_SSE_MATH))
#define HAVE_truncdfsf2_with_temp 1
#define HAVE_truncxfsf2 (TARGET_80387)
#define HAVE_truncxfdf2 (TARGET_80387)
#define HAVE_fix_truncxfdi2 (TARGET_80387)
#define HAVE_fix_truncsfdi2 (TARGET_80387 || (TARGET_64BIT && SSE_FLOAT_MODE_P (SFmode)))
#define HAVE_fix_truncdfdi2 (TARGET_80387 || (TARGET_64BIT && SSE_FLOAT_MODE_P (DFmode)))
#define HAVE_fix_truncxfsi2 (TARGET_80387)
#define HAVE_fix_truncsfsi2 (TARGET_80387 || SSE_FLOAT_MODE_P (SFmode))
#define HAVE_fix_truncdfsi2 (TARGET_80387 || SSE_FLOAT_MODE_P (DFmode))
#define HAVE_fix_truncsfhi2 (TARGET_80387 \
&& !(SSE_FLOAT_MODE_P (SFmode) && (!TARGET_FISTTP || TARGET_SSE_MATH)))
#define HAVE_fix_truncdfhi2 (TARGET_80387 \
&& !(SSE_FLOAT_MODE_P (DFmode) && (!TARGET_FISTTP || TARGET_SSE_MATH)))
#define HAVE_fix_truncxfhi2 (TARGET_80387 \
&& !(SSE_FLOAT_MODE_P (XFmode) && (!TARGET_FISTTP || TARGET_SSE_MATH)))
#define HAVE_fixuns_truncsfsi2 (!TARGET_64BIT && TARGET_SSE2 && TARGET_SSE_MATH)
#define HAVE_fixuns_truncdfsi2 (!TARGET_64BIT && TARGET_SSE2 && TARGET_SSE_MATH)
#define HAVE_fixuns_truncsfhi2 (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH)
#define HAVE_fixuns_truncdfhi2 (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)
#define HAVE_floatsisf2 (TARGET_80387 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_floatdisf2 ((TARGET_80387 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH)) && (TARGET_64BIT))
#define HAVE_floatsidf2 (TARGET_80387 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_floatdidf2 ((TARGET_80387 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)) && (TARGET_64BIT))
#define HAVE_floatunsqisf2 (!TARGET_64BIT \
&& SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH)
#define HAVE_floatunshisf2 (!TARGET_64BIT \
&& SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH)
#define HAVE_floatunsqidf2 (!TARGET_64BIT \
&& SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)
#define HAVE_floatunshidf2 (!TARGET_64BIT \
&& SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)
#define HAVE_floatunssisf2 (!TARGET_64BIT \
&& ((TARGET_80387 && X87_ENABLE_FLOAT (SFmode, DImode) \
&& TARGET_SSE2 && TARGET_INTER_UNIT_MOVES_TO_VEC) \
|| (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH)))
#define HAVE_floatunssidf2 (!TARGET_64BIT \
&& ((TARGET_80387 && X87_ENABLE_FLOAT (DFmode, DImode) \
&& TARGET_SSE2 && TARGET_INTER_UNIT_MOVES_TO_VEC) \
|| (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)))
#define HAVE_floatunssixf2 (!TARGET_64BIT \
&& ((TARGET_80387 && X87_ENABLE_FLOAT (XFmode, DImode) \
&& TARGET_SSE2 && TARGET_INTER_UNIT_MOVES_TO_VEC) \
|| (SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH)))
#define HAVE_floatunsdisf2 (TARGET_64BIT && TARGET_SSE && TARGET_SSE_MATH)
#define HAVE_floatunsdidf2 ((TARGET_64BIT || TARGET_KEEPS_VECTOR_ALIGNED_STACK) \
&& TARGET_SSE2 && TARGET_SSE_MATH)
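/* The floatuns* conversions need special handling because x87 and SSE
   provide only signed integer-to-float conversions; the 32-bit SImode
   paths therefore require either x87 help with fast GPR-to-XMM moves
   (TARGET_INTER_UNIT_MOVES_TO_VEC) or SSE math, and the DImode forms
   are confined to configurations where a signed-conversion-plus-fixup
   sequence is available.  */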
#define HAVE_addqi3 (TARGET_QIMODE_MATH)
#define HAVE_addhi3 (TARGET_HIMODE_MATH)
#define HAVE_addsi3 1
#define HAVE_adddi3 1
#define HAVE_addti3 (TARGET_64BIT)
#define HAVE_addvqi4 1
#define HAVE_addvhi4 1
#define HAVE_addvsi4 1
#define HAVE_addvdi4 (TARGET_64BIT)
#define HAVE_uaddvqi4 1
#define HAVE_uaddvhi4 1
#define HAVE_uaddvsi4 1
#define HAVE_uaddvdi4 (TARGET_64BIT)
#define HAVE_subqi3 (TARGET_QIMODE_MATH)
#define HAVE_subhi3 (TARGET_HIMODE_MATH)
#define HAVE_subsi3 1
#define HAVE_subdi3 1
#define HAVE_subti3 (TARGET_64BIT)
#define HAVE_subvqi4 1
#define HAVE_subvhi4 1
#define HAVE_subvsi4 1
#define HAVE_subvdi4 (TARGET_64BIT)
#define HAVE_usubvqi4 1
#define HAVE_usubvhi4 1
#define HAVE_usubvsi4 1
#define HAVE_usubvdi4 (TARGET_64BIT)
#define HAVE_addqi3_cconly_overflow (!(MEM_P (operands[0]) && MEM_P (operands[1])))
#define HAVE_addxf3 (TARGET_80387)
#define HAVE_subxf3 (TARGET_80387)
#define HAVE_addsf3 ((TARGET_80387 && X87_ENABLE_ARITH (SFmode)) \
|| (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_subsf3 ((TARGET_80387 && X87_ENABLE_ARITH (SFmode)) \
|| (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_adddf3 ((TARGET_80387 && X87_ENABLE_ARITH (DFmode)) \
|| (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_subdf3 ((TARGET_80387 && X87_ENABLE_ARITH (DFmode)) \
|| (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_mulhi3 (TARGET_HIMODE_MATH)
#define HAVE_mulsi3 1
#define HAVE_muldi3 (TARGET_64BIT)
#define HAVE_mulqi3 (TARGET_QIMODE_MATH)
#define HAVE_mulvhi4 1
#define HAVE_mulvsi4 1
#define HAVE_mulvdi4 (TARGET_64BIT)
#define HAVE_umulvhi4 1
#define HAVE_umulvsi4 1
#define HAVE_umulvdi4 (TARGET_64BIT)
#define HAVE_mulvqi4 (TARGET_QIMODE_MATH)
#define HAVE_umulvqi4 (TARGET_QIMODE_MATH)
#define HAVE_mulsidi3 (!TARGET_64BIT)
#define HAVE_umulsidi3 (!TARGET_64BIT)
#define HAVE_mulditi3 (TARGET_64BIT)
#define HAVE_umulditi3 (TARGET_64BIT)
#define HAVE_mulqihi3 (TARGET_QIMODE_MATH)
#define HAVE_umulqihi3 (TARGET_QIMODE_MATH)
#define HAVE_smulsi3_highpart 1
#define HAVE_umulsi3_highpart 1
#define HAVE_smuldi3_highpart (TARGET_64BIT)
#define HAVE_umuldi3_highpart (TARGET_64BIT)
#define HAVE_mulxf3 (TARGET_80387)
#define HAVE_mulsf3 ((TARGET_80387 && X87_ENABLE_ARITH (SFmode)) \
|| (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_muldf3 ((TARGET_80387 && X87_ENABLE_ARITH (DFmode)) \
|| (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_divxf3 (TARGET_80387)
#define HAVE_divsf3 ((TARGET_80387 && X87_ENABLE_ARITH (SFmode)) \
|| (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_divdf3 ((TARGET_80387 && X87_ENABLE_ARITH (DFmode)) \
|| (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_divmodhi4 (TARGET_HIMODE_MATH)
#define HAVE_divmodsi4 1
#define HAVE_divmoddi4 (TARGET_64BIT)
#define HAVE_divmodqi4 (TARGET_QIMODE_MATH)
#define HAVE_udivmodhi4 (TARGET_HIMODE_MATH)
#define HAVE_udivmodsi4 1
#define HAVE_udivmoddi4 (TARGET_64BIT)
#define HAVE_udivmodqi4 (TARGET_QIMODE_MATH)
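/* divmod<mode>4 and udivmod<mode>4 deliver quotient and remainder
   together, matching the hardware DIV/IDIV instructions, so

     q = n / d;
     r = n % d;

   costs a single division when both results are live.  */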
#define HAVE_testsi_ccno_1 1
#define HAVE_testqi_ccz_1 1
#define HAVE_testdi_ccno_1 (TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1])))
#define HAVE_testqi_ext_ccno_0 1
#define HAVE_andqi3 (TARGET_QIMODE_MATH)
#define HAVE_andhi3 (TARGET_HIMODE_MATH)
#define HAVE_andsi3 1
#define HAVE_anddi3 ((TARGET_STV && TARGET_SSE2) || TARGET_64BIT)
#define HAVE_iorqi3 (TARGET_QIMODE_MATH)
#define HAVE_xorqi3 (TARGET_QIMODE_MATH)
#define HAVE_iorhi3 (TARGET_HIMODE_MATH)
#define HAVE_xorhi3 (TARGET_HIMODE_MATH)
#define HAVE_iorsi3 1
#define HAVE_xorsi3 1
#define HAVE_iordi3 ((TARGET_STV && TARGET_SSE2) || TARGET_64BIT)
#define HAVE_xordi3 ((TARGET_STV && TARGET_SSE2) || TARGET_64BIT)
#define HAVE_xorqi_cc_ext_1 1
#define HAVE_negqi2 (TARGET_QIMODE_MATH)
#define HAVE_neghi2 (TARGET_HIMODE_MATH)
#define HAVE_negsi2 1
#define HAVE_negdi2 1
#define HAVE_negti2 (TARGET_64BIT)
#define HAVE_negvqi3 1
#define HAVE_negvhi3 1
#define HAVE_negvsi3 1
#define HAVE_negvdi3 (TARGET_64BIT)
#define HAVE_abssf2 (TARGET_80387 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_negsf2 (TARGET_80387 || (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_absdf2 (TARGET_80387 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_negdf2 (TARGET_80387 || (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_absxf2 (TARGET_80387 || (SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH))
#define HAVE_negxf2 (TARGET_80387 || (SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH))
#define HAVE_abstf2 (TARGET_SSE)
#define HAVE_negtf2 (TARGET_SSE)
#define HAVE_copysignsf3 ((SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| (TARGET_SSE && (SFmode == TFmode)))
#define HAVE_copysigndf3 ((SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| (TARGET_SSE && (DFmode == TFmode)))
#define HAVE_copysigntf3 ((SSE_FLOAT_MODE_P (TFmode) && TARGET_SSE_MATH) \
|| (TARGET_SSE && (TFmode == TFmode)))
#define HAVE_one_cmplqi2 (TARGET_QIMODE_MATH)
#define HAVE_one_cmplhi2 (TARGET_HIMODE_MATH)
#define HAVE_one_cmplsi2 1
#define HAVE_one_cmpldi2 (TARGET_64BIT)
#define HAVE_ashlqi3 (TARGET_QIMODE_MATH)
#define HAVE_ashlhi3 (TARGET_HIMODE_MATH)
#define HAVE_ashlsi3 1
#define HAVE_ashldi3 1
#define HAVE_ashlti3 (TARGET_64BIT)
#define HAVE_x86_shiftsi_adj_1 (TARGET_CMOVE)
#define HAVE_x86_shiftdi_adj_1 ((TARGET_CMOVE) && (TARGET_64BIT))
#define HAVE_x86_shiftsi_adj_2 1
#define HAVE_x86_shiftdi_adj_2 (TARGET_64BIT)
#define HAVE_lshrqi3 (TARGET_QIMODE_MATH)
#define HAVE_ashrqi3 (TARGET_QIMODE_MATH)
#define HAVE_lshrhi3 (TARGET_HIMODE_MATH)
#define HAVE_ashrhi3 (TARGET_HIMODE_MATH)
#define HAVE_lshrsi3 1
#define HAVE_ashrsi3 1
#define HAVE_lshrdi3 1
#define HAVE_ashrdi3 1
#define HAVE_lshrti3 (TARGET_64BIT)
#define HAVE_ashrti3 (TARGET_64BIT)
#define HAVE_x86_shiftsi_adj_3 1
#define HAVE_x86_shiftdi_adj_3 (TARGET_64BIT)
#define HAVE_rotlti3 (TARGET_64BIT)
#define HAVE_rotrti3 (TARGET_64BIT)
#define HAVE_rotldi3 1
#define HAVE_rotrdi3 1
#define HAVE_rotlqi3 (TARGET_QIMODE_MATH)
#define HAVE_rotrqi3 (TARGET_QIMODE_MATH)
#define HAVE_rotlhi3 (TARGET_HIMODE_MATH)
#define HAVE_rotrhi3 (TARGET_HIMODE_MATH)
#define HAVE_rotlsi3 1
#define HAVE_rotrsi3 1
#define HAVE_indirect_jump 1
#define HAVE_tablejump 1
#define HAVE_call 1
#define HAVE_sibcall 1
#define HAVE_call_pop (!TARGET_64BIT)
#define HAVE_call_value 1
#define HAVE_sibcall_value 1
#define HAVE_call_value_pop (!TARGET_64BIT)
#define HAVE_untyped_call 1
#define HAVE_memory_blockage 1
#define HAVE_return (ix86_can_use_return_insn_p ())
#define HAVE_simple_return (!TARGET_SEH && !ix86_static_chain_on_stack)
#define HAVE_prologue 1
#define HAVE_set_got (!TARGET_64BIT)
#define HAVE_set_got_labelled (!TARGET_64BIT)
#define HAVE_epilogue 1
#define HAVE_sibcall_epilogue 1
#define HAVE_eh_return 1
#define HAVE_split_stack_prologue 1
#define HAVE_split_stack_space_check 1
#define HAVE_ffssi2 1
#define HAVE_ffsdi2 (TARGET_64BIT)
#define HAVE_ctzsi2 1
#define HAVE_ctzdi2 (TARGET_64BIT)
#define HAVE_bmi_tzcnt_hi (TARGET_BMI)
#define HAVE_bmi_tzcnt_si (TARGET_BMI)
#define HAVE_bmi_tzcnt_di ((TARGET_BMI) && (TARGET_64BIT))
#define HAVE_clzsi2 1
#define HAVE_clzdi2 (TARGET_64BIT)
#define HAVE_clzsi2_lzcnt (TARGET_LZCNT)
#define HAVE_clzdi2_lzcnt ((TARGET_LZCNT) && (TARGET_64BIT))
#define HAVE_lzcnt_hi (TARGET_LZCNT)
#define HAVE_lzcnt_si (TARGET_LZCNT)
#define HAVE_lzcnt_di ((TARGET_LZCNT) && (TARGET_64BIT))
#define HAVE_bmi2_bzhi_si3 (TARGET_BMI2)
#define HAVE_bmi2_bzhi_di3 ((TARGET_BMI2) && (TARGET_64BIT))
#define HAVE_popcounthi2 (TARGET_POPCNT)
#define HAVE_popcountsi2 (TARGET_POPCNT)
#define HAVE_popcountdi2 ((TARGET_POPCNT) && (TARGET_64BIT))
#define HAVE_bswapdi2 (TARGET_64BIT)
#define HAVE_bswapsi2 1
#define HAVE_paritydi2 (! TARGET_POPCNT)
#define HAVE_paritysi2 (! TARGET_POPCNT)
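/* clz/ctz expand unconditionally via BSR/BSF, whose result on a zero
   input is undefined (the middle end knows this); the *_lzcnt and
   tzcnt variants use the LZCNT/BMI instructions, which define the zero
   case.  parity<mode>2 is provided only when POPCNT is absent, because
   with -mpopcnt the middle end computes parity as popcount & 1
   instead.  */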
#define HAVE_tls_global_dynamic_32 1
#define HAVE_tls_global_dynamic_64_si ((TARGET_64BIT) && (Pmode == SImode))
#define HAVE_tls_global_dynamic_64_di ((TARGET_64BIT) && (Pmode == DImode))
#define HAVE_tls_local_dynamic_base_32 1
#define HAVE_tls_local_dynamic_base_64_si ((TARGET_64BIT) && (Pmode == SImode))
#define HAVE_tls_local_dynamic_base_64_di ((TARGET_64BIT) && (Pmode == DImode))
#define HAVE_tls_dynamic_gnu2_32 (!TARGET_64BIT && TARGET_GNU2_TLS)
#define HAVE_tls_dynamic_gnu2_64 (TARGET_64BIT && TARGET_GNU2_TLS)
#define HAVE_rsqrtsf2 (TARGET_SSE && TARGET_SSE_MATH)
#define HAVE_sqrtsf2 ((TARGET_USE_FANCY_MATH_387 && X87_ENABLE_ARITH (SFmode)) \
|| (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_sqrtdf2 ((TARGET_USE_FANCY_MATH_387 && X87_ENABLE_ARITH (DFmode)) \
|| (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_fmodxf3 (TARGET_USE_FANCY_MATH_387 \
&& flag_finite_math_only)
#define HAVE_fmodsf3 (TARGET_USE_FANCY_MATH_387 \
&& flag_finite_math_only)
#define HAVE_fmoddf3 (TARGET_USE_FANCY_MATH_387 \
&& flag_finite_math_only)
#define HAVE_remainderxf3 (TARGET_USE_FANCY_MATH_387 \
&& flag_finite_math_only)
#define HAVE_remaindersf3 (TARGET_USE_FANCY_MATH_387 \
&& flag_finite_math_only)
#define HAVE_remainderdf3 (TARGET_USE_FANCY_MATH_387 \
&& flag_finite_math_only)
#define HAVE_sincossf3 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_sincosdf3 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
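/* From here through the rounding patterns further below, the recurring
   guard

     TARGET_USE_FANCY_MATH_387
     && (!(SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH)
         || TARGET_MIX_SSE_I387)
     && flag_unsafe_math_optimizations

   reads: the x87 transcendental instructions (fsin, fptan, fyl2x, ...)
   may be used, the mode is not claimed exclusively by SSE math (unless
   -mfpmath=both allows mixing), and the user accepted their reduced
   accuracy via -funsafe-math-optimizations (implied by -ffast-math).  */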
#define HAVE_tanxf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_tansf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_tandf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_atan2xf3 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_atan2sf3 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_atan2df3 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_atanxf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_atansf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_atandf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_asinxf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_asinsf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_asindf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_acosxf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_acossf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_acosdf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_logxf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_logsf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_logdf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_log10xf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_log10sf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_log10df2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_log2xf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_log2sf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_log2df2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_log1pxf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_log1psf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_log1pdf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_logbxf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_logbsf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_logbdf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_ilogbxf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_ilogbsf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_ilogbdf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_expNcorexf3 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_expxf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_expsf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_expdf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_exp10xf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_exp10sf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_exp10df2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_exp2xf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_exp2sf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_exp2df2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_expm1xf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_expm1sf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_expm1df2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_ldexpxf3 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_ldexpsf3 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_ldexpdf3 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_scalbxf3 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_scalbsf3 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_scalbdf3 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_significandxf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_significandsf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_significanddf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_rintsf2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math))
#define HAVE_rintdf2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math))
#define HAVE_roundsf2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math && !flag_rounding_math))
#define HAVE_rounddf2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math && !flag_rounding_math))
#define HAVE_roundxf2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math && !flag_rounding_math))
#define HAVE_lrintxfhi2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_lrintxfsi2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_lrintxfdi2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_lrintsfsi2 (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH)
#define HAVE_lrintsfdi2 ((SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) && (TARGET_64BIT))
#define HAVE_lrintdfsi2 (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH)
#define HAVE_lrintdfdi2 ((SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) && (TARGET_64BIT))
#define HAVE_lroundsfhi2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
&& HImode != HImode \
&& ((HImode != DImode) || TARGET_64BIT) \
&& !flag_trapping_math && !flag_rounding_math))
#define HAVE_lrounddfhi2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
&& HImode != HImode \
&& ((HImode != DImode) || TARGET_64BIT) \
&& !flag_trapping_math && !flag_rounding_math))
#define HAVE_lroundxfhi2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH \
&& HImode != HImode \
&& ((HImode != DImode) || TARGET_64BIT) \
&& !flag_trapping_math && !flag_rounding_math))
#define HAVE_lroundsfsi2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
&& SImode != HImode \
&& ((SImode != DImode) || TARGET_64BIT) \
&& !flag_trapping_math && !flag_rounding_math))
#define HAVE_lrounddfsi2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
&& SImode != HImode \
&& ((SImode != DImode) || TARGET_64BIT) \
&& !flag_trapping_math && !flag_rounding_math))
#define HAVE_lroundxfsi2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH \
&& SImode != HImode \
&& ((SImode != DImode) || TARGET_64BIT) \
&& !flag_trapping_math && !flag_rounding_math))
#define HAVE_lroundsfdi2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
&& DImode != HImode \
&& ((DImode != DImode) || TARGET_64BIT) \
&& !flag_trapping_math && !flag_rounding_math))
#define HAVE_lrounddfdi2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
&& DImode != HImode \
&& ((DImode != DImode) || TARGET_64BIT) \
&& !flag_trapping_math && !flag_rounding_math))
#define HAVE_lroundxfdi2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH \
&& DImode != HImode \
&& ((DImode != DImode) || TARGET_64BIT) \
&& !flag_trapping_math && !flag_rounding_math))
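/* The lround conditions above show iterator substitution at its most
   literal: in lroundsfdi2 the subterm "DImode != HImode" is trivially
   true and "(DImode != DImode) || TARGET_64BIT" folds to TARGET_64BIT,
   while in the ...hi2 variants "HImode != HImode" is false, so the
   whole SSE arm vanishes and only the x87 path remains.  */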
#define HAVE_floorxf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_ceilxf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_btruncxf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_floorsf2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math))
#define HAVE_ceilsf2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math))
#define HAVE_btruncsf2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math))
#define HAVE_floordf2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math))
#define HAVE_ceildf2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math))
#define HAVE_btruncdf2 ((TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations) \
|| (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math))
#define HAVE_nearbyintxf2 (TARGET_USE_FANCY_MATH_387 \
&& flag_unsafe_math_optimizations)
#define HAVE_nearbyintsf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_nearbyintdf2 (TARGET_USE_FANCY_MATH_387 \
&& (!(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH) \
|| TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_lfloorxfhi2 (TARGET_USE_FANCY_MATH_387 \
&& (!TARGET_SSE_MATH || TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_lceilxfhi2 (TARGET_USE_FANCY_MATH_387 \
&& (!TARGET_SSE_MATH || TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_lfloorxfsi2 (TARGET_USE_FANCY_MATH_387 \
&& (!TARGET_SSE_MATH || TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_lceilxfsi2 (TARGET_USE_FANCY_MATH_387 \
&& (!TARGET_SSE_MATH || TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_lfloorxfdi2 (TARGET_USE_FANCY_MATH_387 \
&& (!TARGET_SSE_MATH || TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_lceilxfdi2 (TARGET_USE_FANCY_MATH_387 \
&& (!TARGET_SSE_MATH || TARGET_MIX_SSE_I387) \
&& flag_unsafe_math_optimizations)
#define HAVE_lfloorsfsi2 (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math)
#define HAVE_lceilsfsi2 (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math)
#define HAVE_lfloorsfdi2 ((SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math) && (TARGET_64BIT))
#define HAVE_lceilsfdi2 ((SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math) && (TARGET_64BIT))
#define HAVE_lfloordfsi2 (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math)
#define HAVE_lceildfsi2 (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math)
#define HAVE_lfloordfdi2 ((SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math) && (TARGET_64BIT))
#define HAVE_lceildfdi2 ((SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH \
&& !flag_trapping_math) && (TARGET_64BIT))
#define HAVE_isinfxf2 (TARGET_USE_FANCY_MATH_387 \
&& ix86_libc_has_function (function_c99_misc))
#define HAVE_isinfsf2 (TARGET_USE_FANCY_MATH_387 \
&& ix86_libc_has_function (function_c99_misc) \
&& !(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_isinfdf2 (TARGET_USE_FANCY_MATH_387 \
&& ix86_libc_has_function (function_c99_misc) \
&& !(SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_signbitxf2 (TARGET_USE_FANCY_MATH_387)
#define HAVE_signbitdf2 (TARGET_USE_FANCY_MATH_387 \
|| (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_signbitsf2 (TARGET_USE_FANCY_MATH_387 \
&& !(SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_movmemsi 1
#define HAVE_movmemdi (TARGET_64BIT)
#define HAVE_strmov 1
#define HAVE_strmov_singleop 1
#define HAVE_rep_mov 1
#define HAVE_setmemsi 1
#define HAVE_setmemdi (TARGET_64BIT)
#define HAVE_strset 1
#define HAVE_strset_singleop 1
#define HAVE_rep_stos 1
#define HAVE_cmpstrnsi 1
#define HAVE_cmpintqi 1
#define HAVE_cmpstrnqi_nz_1 1
#define HAVE_cmpstrnqi_1 1
#define HAVE_strlensi (Pmode == SImode)
#define HAVE_strlendi (Pmode == DImode)
#define HAVE_strlenqi_1 1
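/* movmem/setmem/cmpstrn/strlen are the named patterns behind inline
   expansion of the string built-ins; for example

     __builtin_memcpy (dst, src, n);

   may be emitted inline through movmemsi (rep movs or an unrolled move
   sequence) instead of calling libc, depending on size and tuning.  */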
#define HAVE_movqicc (TARGET_QIMODE_MATH)
#define HAVE_movhicc (TARGET_HIMODE_MATH)
#define HAVE_movsicc 1
#define HAVE_movdicc (TARGET_64BIT)
#define HAVE_x86_movsicc_0_m1 1
#define HAVE_x86_movdicc_0_m1 (TARGET_64BIT)
#define HAVE_movsfcc ((TARGET_80387 && TARGET_CMOVE) \
|| (SSE_FLOAT_MODE_P (SFmode) && TARGET_SSE_MATH))
#define HAVE_movdfcc ((TARGET_80387 && TARGET_CMOVE) \
|| (SSE_FLOAT_MODE_P (DFmode) && TARGET_SSE_MATH))
#define HAVE_movxfcc ((TARGET_80387 && TARGET_CMOVE) \
|| (SSE_FLOAT_MODE_P (XFmode) && TARGET_SSE_MATH))
#define HAVE_addqicc 1
#define HAVE_addhicc 1
#define HAVE_addsicc 1
#define HAVE_adddicc (TARGET_64BIT)
#define HAVE_allocate_stack (ix86_target_stack_probe ())
#define HAVE_probe_stack 1
#define HAVE_builtin_setjmp_receiver (!TARGET_64BIT && flag_pic)
#define HAVE_prefetch (TARGET_3DNOW || TARGET_PREFETCH_SSE || TARGET_PRFCHW || TARGET_PREFETCHWT1)
#define HAVE_stack_protect_set (TARGET_SSP_TLS_GUARD)
#define HAVE_stack_protect_test (TARGET_SSP_TLS_GUARD)
#define HAVE_lwp_llwpcb (TARGET_LWP)
#define HAVE_lwp_slwpcb (TARGET_LWP)
#define HAVE_lwp_lwpvalsi3 (TARGET_LWP)
#define HAVE_lwp_lwpvaldi3 ((TARGET_LWP) && (TARGET_64BIT))
#define HAVE_lwp_lwpinssi3 (TARGET_LWP)
#define HAVE_lwp_lwpinsdi3 ((TARGET_LWP) && (TARGET_64BIT))
#define HAVE_pause 1
#define HAVE_xbegin (TARGET_RTM)
#define HAVE_xtest (TARGET_RTM)
#define HAVE_bnd32_mk ((TARGET_MPX) && (!TARGET_LP64))
#define HAVE_bnd64_mk ((TARGET_MPX) && (TARGET_LP64))
#define HAVE_movbnd32 ((TARGET_MPX) && (!TARGET_LP64))
#define HAVE_movbnd64 ((TARGET_MPX) && (TARGET_LP64))
#define HAVE_bnd32_cl ((TARGET_MPX) && (!TARGET_LP64))
#define HAVE_bnd32_cu ((TARGET_MPX) && (!TARGET_LP64))
#define HAVE_bnd32_cn ((TARGET_MPX) && (!TARGET_LP64))
#define HAVE_bnd64_cl ((TARGET_MPX) && (TARGET_LP64))
#define HAVE_bnd64_cu ((TARGET_MPX) && (TARGET_LP64))
#define HAVE_bnd64_cn ((TARGET_MPX) && (TARGET_LP64))
#define HAVE_bnd32_ldx ((TARGET_MPX) && (!TARGET_LP64))
#define HAVE_bnd64_ldx ((TARGET_MPX) && (TARGET_LP64))
#define HAVE_bnd32_stx ((TARGET_MPX) && (!TARGET_LP64))
#define HAVE_bnd64_stx ((TARGET_MPX) && (TARGET_LP64))
#define HAVE_rdpkru (TARGET_PKU)
#define HAVE_wrpkru (TARGET_PKU)
#define HAVE_movv8qi (TARGET_MMX)
#define HAVE_movv4hi (TARGET_MMX)
#define HAVE_movv2si (TARGET_MMX)
#define HAVE_movv1di (TARGET_MMX)
#define HAVE_movv2sf (TARGET_MMX)
#define HAVE_movmisalignv8qi (TARGET_MMX)
#define HAVE_movmisalignv4hi (TARGET_MMX)
#define HAVE_movmisalignv2si (TARGET_MMX)
#define HAVE_movmisalignv1di (TARGET_MMX)
#define HAVE_movmisalignv2sf (TARGET_MMX)
#define HAVE_mmx_addv2sf3 (TARGET_3DNOW)
#define HAVE_mmx_subv2sf3 (TARGET_3DNOW)
#define HAVE_mmx_subrv2sf3 (TARGET_3DNOW)
#define HAVE_mmx_mulv2sf3 (TARGET_3DNOW)
#define HAVE_mmx_smaxv2sf3 (TARGET_3DNOW)
#define HAVE_mmx_sminv2sf3 (TARGET_3DNOW)
#define HAVE_mmx_eqv2sf3 (TARGET_3DNOW)
#define HAVE_vec_setv2sf (TARGET_MMX)
#define HAVE_vec_extractv2sf (TARGET_MMX)
#define HAVE_vec_initv2sf (TARGET_SSE)
#define HAVE_mmx_addv8qi3 (TARGET_MMX || (TARGET_SSE2 && V8QImode == V1DImode))
#define HAVE_mmx_subv8qi3 (TARGET_MMX || (TARGET_SSE2 && V8QImode == V1DImode))
#define HAVE_mmx_addv4hi3 (TARGET_MMX || (TARGET_SSE2 && V4HImode == V1DImode))
#define HAVE_mmx_subv4hi3 (TARGET_MMX || (TARGET_SSE2 && V4HImode == V1DImode))
#define HAVE_mmx_addv2si3 (TARGET_MMX || (TARGET_SSE2 && V2SImode == V1DImode))
#define HAVE_mmx_subv2si3 (TARGET_MMX || (TARGET_SSE2 && V2SImode == V1DImode))
#define HAVE_mmx_addv1di3 (TARGET_MMX || (TARGET_SSE2 && V1DImode == V1DImode))
#define HAVE_mmx_subv1di3 (TARGET_MMX || (TARGET_SSE2 && V1DImode == V1DImode))
#define HAVE_mmx_ssaddv8qi3 (TARGET_MMX)
#define HAVE_mmx_usaddv8qi3 (TARGET_MMX)
#define HAVE_mmx_sssubv8qi3 (TARGET_MMX)
#define HAVE_mmx_ussubv8qi3 (TARGET_MMX)
#define HAVE_mmx_ssaddv4hi3 (TARGET_MMX)
#define HAVE_mmx_usaddv4hi3 (TARGET_MMX)
#define HAVE_mmx_sssubv4hi3 (TARGET_MMX)
#define HAVE_mmx_ussubv4hi3 (TARGET_MMX)
#define HAVE_mmx_mulv4hi3 (TARGET_MMX)
#define HAVE_mmx_smulv4hi3_highpart (TARGET_MMX)
#define HAVE_mmx_umulv4hi3_highpart (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_pmaddwd (TARGET_MMX)
#define HAVE_mmx_pmulhrwv4hi3 (TARGET_3DNOW)
#define HAVE_sse2_umulv1siv1di3 (TARGET_SSE2)
#define HAVE_mmx_smaxv4hi3 (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_sminv4hi3 (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_umaxv8qi3 (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_uminv8qi3 (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_eqv8qi3 (TARGET_MMX)
#define HAVE_mmx_eqv4hi3 (TARGET_MMX)
#define HAVE_mmx_eqv2si3 (TARGET_MMX)
#define HAVE_mmx_andv8qi3 (TARGET_MMX)
#define HAVE_mmx_iorv8qi3 (TARGET_MMX)
#define HAVE_mmx_xorv8qi3 (TARGET_MMX)
#define HAVE_mmx_andv4hi3 (TARGET_MMX)
#define HAVE_mmx_iorv4hi3 (TARGET_MMX)
#define HAVE_mmx_xorv4hi3 (TARGET_MMX)
#define HAVE_mmx_andv2si3 (TARGET_MMX)
#define HAVE_mmx_iorv2si3 (TARGET_MMX)
#define HAVE_mmx_xorv2si3 (TARGET_MMX)
#define HAVE_mmx_pinsrw (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_pshufw (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_vec_setv2si (TARGET_MMX)
#define HAVE_vec_extractv2si (TARGET_MMX)
#define HAVE_vec_initv2si (TARGET_SSE)
#define HAVE_vec_setv4hi (TARGET_MMX)
#define HAVE_vec_extractv4hi (TARGET_MMX)
#define HAVE_vec_initv4hi (TARGET_SSE)
#define HAVE_vec_setv8qi (TARGET_MMX)
#define HAVE_vec_extractv8qi (TARGET_MMX)
#define HAVE_vec_initv8qi (TARGET_SSE)
#define HAVE_mmx_uavgv8qi3 (TARGET_SSE || TARGET_3DNOW)
#define HAVE_mmx_uavgv4hi3 (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_maskmovq (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_mmx_emms (TARGET_MMX)
#define HAVE_mmx_femms (TARGET_3DNOW)
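/* Conditions of the form (TARGET_SSE || TARGET_3DNOW_A) mark the MMX
   extensions that arrived with SSE (pshufw, pinsrw, pmaxsw, pavgb,
   maskmovq, ...) and were equally present in AMD's extended 3DNow! on
   the Athlon; mmx_femms is the 3DNow! counterpart of emms.  */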
#define HAVE_movv64qi ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movv32qi ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movv16qi (TARGET_SSE)
#define HAVE_movv32hi ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movv16hi ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movv8hi (TARGET_SSE)
#define HAVE_movv16si ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movv8si ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movv4si (TARGET_SSE)
#define HAVE_movv8di ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movv4di ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movv2di (TARGET_SSE)
#define HAVE_movv4ti ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_movv2ti ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movv1ti (TARGET_SSE)
#define HAVE_movv16sf ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movv8sf ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movv4sf (TARGET_SSE)
#define HAVE_movv8df ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movv4df ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movv2df (TARGET_SSE)
#define HAVE_movmisalignv64qi ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movmisalignv32qi ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movmisalignv16qi (TARGET_SSE)
#define HAVE_movmisalignv32hi ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movmisalignv16hi ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movmisalignv8hi (TARGET_SSE)
#define HAVE_movmisalignv16si ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movmisalignv8si ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movmisalignv4si (TARGET_SSE)
#define HAVE_movmisalignv8di ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movmisalignv4di ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movmisalignv2di (TARGET_SSE)
#define HAVE_movmisalignv4ti ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_movmisalignv2ti ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movmisalignv1ti (TARGET_SSE)
#define HAVE_movmisalignv16sf ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movmisalignv8sf ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movmisalignv4sf (TARGET_SSE)
#define HAVE_movmisalignv8df ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_movmisalignv4df ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_movmisalignv2df (TARGET_SSE)
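/* The vector mov<mode> / movmisalign<mode> conditions pair the
   pattern's base TARGET_SSE with the per-mode requirement contributed
   by the mode iterator: AVX for the 256-bit modes, AVX512F (AVX512BW
   for V4TI) for the 512-bit ones.  */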
#define HAVE_avx512f_loadups512 ((TARGET_SSE && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_loadups512_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx_loadups256 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_avx_loadups256_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX)))
#define HAVE_sse_loadups (TARGET_SSE && 1)
#define HAVE_sse_loadups_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_avx512f_loadupd512 ((TARGET_SSE && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_loadupd512_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx_loadupd256 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_avx_loadupd256_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX)))
#define HAVE_sse2_loadupd ((TARGET_SSE && 1) && (TARGET_SSE2))
#define HAVE_sse2_loadupd_mask ((TARGET_AVX512F) && ((TARGET_SSE && (16 == 64 || TARGET_AVX512VL)) && (TARGET_SSE2)))
#define HAVE_avx_loaddquv32qi ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX))
#define HAVE_avx_loaddquv32qi_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && TARGET_AVX512VL && TARGET_AVX512BW) && (TARGET_AVX)))
#define HAVE_sse2_loaddquv16qi (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_loaddquv16qi_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL && TARGET_AVX512BW))
#define HAVE_avx512f_loaddquv64qi (TARGET_AVX512BW)
#define HAVE_avx512f_loaddquv64qi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512bw_loaddquv32hi (TARGET_AVX512BW)
#define HAVE_avx512bw_loaddquv32hi_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_loaddquv8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_loaddquv8hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_loaddquv16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_loaddquv16hi_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512f_loaddquv16si (TARGET_AVX512F)
#define HAVE_avx512f_loaddquv16si_mask (TARGET_AVX512F)
#define HAVE_avx_loaddquv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx_loaddquv8si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_sse2_loaddquv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_sse2_loaddquv4si_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_loaddquv8di (TARGET_AVX512F)
#define HAVE_avx512f_loaddquv8di_mask (TARGET_AVX512F)
#define HAVE_avx512vl_loaddquv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_loaddquv4di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_loaddquv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_loaddquv2di_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
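/* In the masked-load conditions above, the literal width tests come
   from substituting the vector size into "<size> == 64 ||
   TARGET_AVX512VL": 512-bit masked operations need only AVX512F, while
   the 256- and 128-bit forms ("32 == 64" and "16 == 64", both false)
   additionally require AVX512VL.  The same shape recurs throughout the
   masked arithmetic patterns that follow.  */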
#define HAVE_storentdi ((TARGET_SSE) && (TARGET_SSE2 && TARGET_64BIT))
#define HAVE_storentsi ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_storentsf ((TARGET_SSE) && (TARGET_SSE4A))
#define HAVE_storentdf ((TARGET_SSE) && (TARGET_SSE4A))
#define HAVE_storentv8di ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_storentv4di ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_storentv2di ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_storentv16sf ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_storentv8sf ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_storentv4sf (TARGET_SSE)
#define HAVE_storentv8df ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_storentv4df ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_storentv2df ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_absv16sf2 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_negv16sf2 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_absv8sf2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_negv8sf2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_absv4sf2 (TARGET_SSE)
#define HAVE_negv4sf2 (TARGET_SSE)
#define HAVE_absv8df2 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_negv8df2 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_absv4df2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_negv4df2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_absv2df2 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_negv2df2 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_addv16sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_addv16sf3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_addv16sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_addv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_subv16sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_subv16sf3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_subv16sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_subv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_addv8sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_addv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_subv8sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_subv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_addv4sf3 (TARGET_SSE && 1 && 1)
#define HAVE_addv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_subv4sf3 (TARGET_SSE && 1 && 1)
#define HAVE_subv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_addv8df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_addv8df3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_addv8df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_addv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_subv8df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_subv8df3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_subv8df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_subv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_addv4df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_addv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_subv4df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_subv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_addv2df3 ((TARGET_SSE && 1 && 1) && (TARGET_SSE2))
#define HAVE_addv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
#define HAVE_subv2df3 ((TARGET_SSE && 1 && 1) && (TARGET_SSE2))
#define HAVE_subv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
#define HAVE_mulv16sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_mulv16sf3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_mulv16sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_mulv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_mulv8sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_mulv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_mulv4sf3 (TARGET_SSE && 1 && 1)
#define HAVE_mulv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_mulv8df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_mulv8df3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_mulv8df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_mulv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_mulv4df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_mulv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_mulv2df3 ((TARGET_SSE && 1 && 1) && (TARGET_SSE2))
#define HAVE_mulv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
#define HAVE_divv8df3 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_divv4df3 ((TARGET_SSE2) && (TARGET_AVX))
#define HAVE_divv2df3 (TARGET_SSE2)
#define HAVE_divv16sf3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_divv8sf3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_divv4sf3 (TARGET_SSE)
#define HAVE_sqrtv8df2 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_sqrtv4df2 ((TARGET_SSE2) && (TARGET_AVX))
#define HAVE_sqrtv2df2 (TARGET_SSE2)
#define HAVE_sqrtv16sf2 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_sqrtv8sf2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_sqrtv4sf2 (TARGET_SSE)
#define HAVE_rsqrtv8sf2 ((TARGET_SSE_MATH) && (TARGET_AVX))
#define HAVE_rsqrtv4sf2 (TARGET_SSE_MATH)
#define HAVE_smaxv16sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_smaxv16sf3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_smaxv16sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_smaxv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_sminv16sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_sminv16sf3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_sminv16sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_sminv16sf3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V16SFmode == V16SFmode \
|| V16SFmode == V8DFmode \
|| V16SFmode == V8DImode \
|| V16SFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_smaxv8sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_smaxv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_sminv8sf3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_sminv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_smaxv4sf3 (TARGET_SSE && 1 && 1)
#define HAVE_smaxv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_sminv4sf3 (TARGET_SSE && 1 && 1)
#define HAVE_sminv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1))
#define HAVE_smaxv8df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_smaxv8df3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_smaxv8df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_smaxv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_sminv8df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX512F))
#define HAVE_sminv8df3_round ((TARGET_AVX512F) && ((TARGET_SSE && 1 && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F)))
#define HAVE_sminv8df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX512F)))
#define HAVE_sminv8df3_mask_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && ((TARGET_SSE && (64 == 64 || TARGET_AVX512VL) && (V8DFmode == V16SFmode \
|| V8DFmode == V8DFmode \
|| V8DFmode == V8DImode \
|| V8DFmode == V16SImode)) && (TARGET_AVX512F))))
#define HAVE_smaxv4df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_smaxv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_sminv4df3 ((TARGET_SSE && 1 && 1) && (TARGET_AVX))
#define HAVE_sminv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (32 == 64 || TARGET_AVX512VL) && 1) && (TARGET_AVX)))
#define HAVE_smaxv2df3 ((TARGET_SSE && 1 && 1) && (TARGET_SSE2))
#define HAVE_smaxv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
#define HAVE_sminv2df3 ((TARGET_SSE && 1 && 1) && (TARGET_SSE2))
#define HAVE_sminv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && (16 == 64 || TARGET_AVX512VL) && 1) && (TARGET_SSE2)))
#define HAVE_sse3_haddv2df3 (TARGET_SSE3)
#define HAVE_reduc_plus_scal_v8df (TARGET_AVX512F)
#define HAVE_reduc_plus_scal_v4df (TARGET_AVX)
#define HAVE_reduc_plus_scal_v2df (TARGET_SSE3)
#define HAVE_reduc_plus_scal_v16sf (TARGET_AVX512F)
#define HAVE_reduc_plus_scal_v8sf (TARGET_AVX)
#define HAVE_reduc_plus_scal_v4sf (TARGET_SSE)
#define HAVE_reduc_smax_scal_v32qi (TARGET_AVX2)
#define HAVE_reduc_smin_scal_v32qi (TARGET_AVX2)
#define HAVE_reduc_smax_scal_v16hi (TARGET_AVX2)
#define HAVE_reduc_smin_scal_v16hi (TARGET_AVX2)
#define HAVE_reduc_smax_scal_v8si (TARGET_AVX2)
#define HAVE_reduc_smin_scal_v8si (TARGET_AVX2)
#define HAVE_reduc_smax_scal_v4di (TARGET_AVX2)
#define HAVE_reduc_smin_scal_v4di (TARGET_AVX2)
#define HAVE_reduc_smax_scal_v8sf (TARGET_AVX)
#define HAVE_reduc_smin_scal_v8sf (TARGET_AVX)
#define HAVE_reduc_smax_scal_v4df (TARGET_AVX)
#define HAVE_reduc_smin_scal_v4df (TARGET_AVX)
#define HAVE_reduc_smax_scal_v4sf (TARGET_SSE)
#define HAVE_reduc_smin_scal_v4sf (TARGET_SSE)
#define HAVE_reduc_smax_scal_v64qi (TARGET_AVX512BW)
#define HAVE_reduc_smin_scal_v64qi (TARGET_AVX512BW)
#define HAVE_reduc_smax_scal_v32hi (TARGET_AVX512BW)
#define HAVE_reduc_smin_scal_v32hi (TARGET_AVX512BW)
#define HAVE_reduc_smax_scal_v16si (TARGET_AVX512F)
#define HAVE_reduc_smin_scal_v16si (TARGET_AVX512F)
#define HAVE_reduc_smax_scal_v8di (TARGET_AVX512F)
#define HAVE_reduc_smin_scal_v8di (TARGET_AVX512F)
#define HAVE_reduc_smax_scal_v16sf (TARGET_AVX512F)
#define HAVE_reduc_smin_scal_v16sf (TARGET_AVX512F)
#define HAVE_reduc_smax_scal_v8df (TARGET_AVX512F)
#define HAVE_reduc_smin_scal_v8df (TARGET_AVX512F)
#define HAVE_reduc_umax_scal_v16si (TARGET_AVX512F)
#define HAVE_reduc_umin_scal_v16si (TARGET_AVX512F)
#define HAVE_reduc_umax_scal_v8di (TARGET_AVX512F)
#define HAVE_reduc_umin_scal_v8di (TARGET_AVX512F)
#define HAVE_reduc_umax_scal_v32hi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_reduc_umin_scal_v32hi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_reduc_umax_scal_v64qi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_reduc_umin_scal_v64qi ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_reduc_umax_scal_v32qi (TARGET_AVX2)
#define HAVE_reduc_umin_scal_v32qi (TARGET_AVX2)
#define HAVE_reduc_umax_scal_v16hi (TARGET_AVX2)
#define HAVE_reduc_umin_scal_v16hi (TARGET_AVX2)
#define HAVE_reduc_umax_scal_v8si (TARGET_AVX2)
#define HAVE_reduc_umin_scal_v8si (TARGET_AVX2)
#define HAVE_reduc_umax_scal_v4di (TARGET_AVX2)
#define HAVE_reduc_umin_scal_v4di (TARGET_AVX2)
#define HAVE_reduc_umin_scal_v8hi (TARGET_SSE4_1)
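/* vec_cmp<srcmode><dstmode> compares two vectors and delivers the
   boolean result in <dstmode>: the AVX-512 forms target a mask
   register whose mode is sized by lane count (qi for 8 lanes or
   fewer, hi for 16, si for 32, di for 64), while pre-AVX-512 forms
   such as vec_cmpv4siv4si return a full-width vector mask.  The
   vec_cmpu* patterns are the unsigned comparisons.  */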
#define HAVE_vec_cmpv16sihi (TARGET_AVX512F)
#define HAVE_vec_cmpv8siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv4siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv8diqi (TARGET_AVX512F)
#define HAVE_vec_cmpv4diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv2diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv16sfhi (TARGET_AVX512F)
#define HAVE_vec_cmpv8sfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv4sfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv8dfqi (TARGET_AVX512F)
#define HAVE_vec_cmpv4dfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv2dfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv64qidi (TARGET_AVX512BW)
#define HAVE_vec_cmpv16qihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv32qisi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv32hisi (TARGET_AVX512BW)
#define HAVE_vec_cmpv16hihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv8hiqi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vec_cmpv32qiv32qi (TARGET_AVX2)
#define HAVE_vec_cmpv16hiv16hi (TARGET_AVX2)
#define HAVE_vec_cmpv8siv8si (TARGET_AVX2)
#define HAVE_vec_cmpv4div4di (TARGET_AVX2)
#define HAVE_vec_cmpv16qiv16qi (TARGET_SSE2)
#define HAVE_vec_cmpv8hiv8hi (TARGET_SSE2)
#define HAVE_vec_cmpv4siv4si (TARGET_SSE2)
#define HAVE_vec_cmpv2div2di (TARGET_SSE4_2)
#define HAVE_vec_cmpv8sfv8si (TARGET_AVX)
#define HAVE_vec_cmpv4dfv4di (TARGET_AVX)
#define HAVE_vec_cmpv4sfv4si (TARGET_SSE)
#define HAVE_vec_cmpv2dfv2di ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_vec_cmpuv16sihi (TARGET_AVX512F)
#define HAVE_vec_cmpuv8siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpuv4siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpuv8diqi (TARGET_AVX512F)
#define HAVE_vec_cmpuv4diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpuv2diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vec_cmpuv64qidi (TARGET_AVX512BW)
#define HAVE_vec_cmpuv16qihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vec_cmpuv32qisi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vec_cmpuv32hisi (TARGET_AVX512BW)
#define HAVE_vec_cmpuv16hihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vec_cmpuv8hiqi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vec_cmpuv32qiv32qi (TARGET_AVX2)
#define HAVE_vec_cmpuv16hiv16hi (TARGET_AVX2)
#define HAVE_vec_cmpuv8siv8si (TARGET_AVX2)
#define HAVE_vec_cmpuv4div4di (TARGET_AVX2)
#define HAVE_vec_cmpuv16qiv16qi (TARGET_SSE2)
#define HAVE_vec_cmpuv8hiv8hi (TARGET_SSE2)
#define HAVE_vec_cmpuv4siv4si (TARGET_SSE2)
#define HAVE_vec_cmpuv2div2di (TARGET_SSE4_2)
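/* vcond<datamode><cmpmode> is the vector-select expander driven by a
   comparison in <cmpmode>.  The GET_MODE_NUNITS equality in each
   condition merely enforces that data and comparison vectors have
   the same element count; with both modes spelled out literally it
   folds to a compile-time constant.  */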
#define HAVE_vcondv64qiv16sf (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V64QImode) \
== GET_MODE_NUNITS (V16SFmode)))
#define HAVE_vcondv32hiv16sf (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V32HImode) \
== GET_MODE_NUNITS (V16SFmode)))
#define HAVE_vcondv16siv16sf (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SImode) \
== GET_MODE_NUNITS (V16SFmode)))
#define HAVE_vcondv8div16sf (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DImode) \
== GET_MODE_NUNITS (V16SFmode)))
#define HAVE_vcondv16sfv16sf (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SFmode) \
== GET_MODE_NUNITS (V16SFmode)))
#define HAVE_vcondv8dfv16sf (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DFmode) \
== GET_MODE_NUNITS (V16SFmode)))
#define HAVE_vcondv64qiv8df (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V64QImode) \
== GET_MODE_NUNITS (V8DFmode)))
#define HAVE_vcondv32hiv8df (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V32HImode) \
== GET_MODE_NUNITS (V8DFmode)))
#define HAVE_vcondv16siv8df (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SImode) \
== GET_MODE_NUNITS (V8DFmode)))
#define HAVE_vcondv8div8df (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DImode) \
== GET_MODE_NUNITS (V8DFmode)))
#define HAVE_vcondv16sfv8df (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SFmode) \
== GET_MODE_NUNITS (V8DFmode)))
#define HAVE_vcondv8dfv8df (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DFmode) \
== GET_MODE_NUNITS (V8DFmode)))
#define HAVE_vcondv32qiv8sf (TARGET_AVX \
&& (GET_MODE_NUNITS (V32QImode) \
== GET_MODE_NUNITS (V8SFmode)))
#define HAVE_vcondv32qiv4df (TARGET_AVX \
&& (GET_MODE_NUNITS (V32QImode) \
== GET_MODE_NUNITS (V4DFmode)))
#define HAVE_vcondv16hiv8sf (TARGET_AVX \
&& (GET_MODE_NUNITS (V16HImode) \
== GET_MODE_NUNITS (V8SFmode)))
#define HAVE_vcondv16hiv4df (TARGET_AVX \
&& (GET_MODE_NUNITS (V16HImode) \
== GET_MODE_NUNITS (V4DFmode)))
#define HAVE_vcondv8siv8sf (TARGET_AVX \
&& (GET_MODE_NUNITS (V8SImode) \
== GET_MODE_NUNITS (V8SFmode)))
#define HAVE_vcondv8siv4df (TARGET_AVX \
&& (GET_MODE_NUNITS (V8SImode) \
== GET_MODE_NUNITS (V4DFmode)))
#define HAVE_vcondv4div8sf (TARGET_AVX \
&& (GET_MODE_NUNITS (V4DImode) \
== GET_MODE_NUNITS (V8SFmode)))
#define HAVE_vcondv4div4df (TARGET_AVX \
&& (GET_MODE_NUNITS (V4DImode) \
== GET_MODE_NUNITS (V4DFmode)))
#define HAVE_vcondv8sfv8sf (TARGET_AVX \
&& (GET_MODE_NUNITS (V8SFmode) \
== GET_MODE_NUNITS (V8SFmode)))
#define HAVE_vcondv8sfv4df (TARGET_AVX \
&& (GET_MODE_NUNITS (V8SFmode) \
== GET_MODE_NUNITS (V4DFmode)))
#define HAVE_vcondv4dfv8sf (TARGET_AVX \
&& (GET_MODE_NUNITS (V4DFmode) \
== GET_MODE_NUNITS (V8SFmode)))
#define HAVE_vcondv4dfv4df (TARGET_AVX \
&& (GET_MODE_NUNITS (V4DFmode) \
== GET_MODE_NUNITS (V4DFmode)))
#define HAVE_vcondv16qiv4sf (TARGET_SSE \
&& (GET_MODE_NUNITS (V16QImode) \
== GET_MODE_NUNITS (V4SFmode)))
#define HAVE_vcondv16qiv2df ((TARGET_SSE \
&& (GET_MODE_NUNITS (V16QImode) \
== GET_MODE_NUNITS (V2DFmode))) && (TARGET_SSE2))
#define HAVE_vcondv8hiv4sf (TARGET_SSE \
&& (GET_MODE_NUNITS (V8HImode) \
== GET_MODE_NUNITS (V4SFmode)))
#define HAVE_vcondv8hiv2df ((TARGET_SSE \
&& (GET_MODE_NUNITS (V8HImode) \
== GET_MODE_NUNITS (V2DFmode))) && (TARGET_SSE2))
#define HAVE_vcondv4siv4sf (TARGET_SSE \
&& (GET_MODE_NUNITS (V4SImode) \
== GET_MODE_NUNITS (V4SFmode)))
#define HAVE_vcondv4siv2df ((TARGET_SSE \
&& (GET_MODE_NUNITS (V4SImode) \
== GET_MODE_NUNITS (V2DFmode))) && (TARGET_SSE2))
#define HAVE_vcondv2div4sf (TARGET_SSE \
&& (GET_MODE_NUNITS (V2DImode) \
== GET_MODE_NUNITS (V4SFmode)))
#define HAVE_vcondv2div2df ((TARGET_SSE \
&& (GET_MODE_NUNITS (V2DImode) \
== GET_MODE_NUNITS (V2DFmode))) && (TARGET_SSE2))
#define HAVE_vcondv4sfv4sf (TARGET_SSE \
&& (GET_MODE_NUNITS (V4SFmode) \
== GET_MODE_NUNITS (V4SFmode)))
#define HAVE_vcondv4sfv2df ((TARGET_SSE \
&& (GET_MODE_NUNITS (V4SFmode) \
== GET_MODE_NUNITS (V2DFmode))) && (TARGET_SSE2))
#define HAVE_vcondv2dfv4sf ((TARGET_SSE \
&& (GET_MODE_NUNITS (V2DFmode) \
== GET_MODE_NUNITS (V4SFmode))) && (TARGET_SSE2))
#define HAVE_vcondv2dfv2df ((TARGET_SSE \
&& (GET_MODE_NUNITS (V2DFmode) \
== GET_MODE_NUNITS (V2DFmode))) && (TARGET_SSE2))
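/* vcond_mask_<mode><maskmode> selects between two vectors under a
   precomputed mask: the AVX-512 variants take a mask register
   (qi/hi/si/di), the SSE/AVX variants a vector mask of equal
   width.  */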
#define HAVE_vcond_mask_v16sihi (TARGET_AVX512F)
#define HAVE_vcond_mask_v8siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vcond_mask_v4siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vcond_mask_v8diqi (TARGET_AVX512F)
#define HAVE_vcond_mask_v4diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vcond_mask_v2diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vcond_mask_v16sfhi (TARGET_AVX512F)
#define HAVE_vcond_mask_v8sfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vcond_mask_v4sfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vcond_mask_v8dfqi (TARGET_AVX512F)
#define HAVE_vcond_mask_v4dfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vcond_mask_v2dfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vcond_mask_v64qidi (TARGET_AVX512BW)
#define HAVE_vcond_mask_v16qihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vcond_mask_v32qisi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vcond_mask_v32hisi (TARGET_AVX512BW)
#define HAVE_vcond_mask_v16hihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vcond_mask_v8hiqi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_vcond_mask_v32qiv32qi (TARGET_AVX2)
#define HAVE_vcond_mask_v16hiv16hi (TARGET_AVX2)
#define HAVE_vcond_mask_v8siv8si (TARGET_AVX2)
#define HAVE_vcond_mask_v4div4di (TARGET_AVX2)
#define HAVE_vcond_mask_v16qiv16qi (TARGET_SSE2)
#define HAVE_vcond_mask_v8hiv8hi (TARGET_SSE2)
#define HAVE_vcond_mask_v4siv4si (TARGET_SSE2)
#define HAVE_vcond_mask_v2div2di (TARGET_SSE4_2)
#define HAVE_vcond_mask_v8sfv8si (TARGET_AVX)
#define HAVE_vcond_mask_v4dfv4di (TARGET_AVX)
#define HAVE_vcond_mask_v4sfv4si (TARGET_SSE)
#define HAVE_vcond_mask_v2dfv2di ((TARGET_SSE) && (TARGET_SSE2))
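/* Bitwise and/ior/xor on FP vector modes.  The plain forms need only
   the base vector ISA; the "_mask" forms are AVX-512 write-masked
   and so require TARGET_AVX512VL for the 128- and 256-bit modes,
   while the 512-bit V16SF/V8DF forms fall under TARGET_AVX512F
   alone.  */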
#define HAVE_andv8sf3 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_andv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_AVX)))
#define HAVE_iorv8sf3 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_iorv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_AVX)))
#define HAVE_xorv8sf3 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_xorv8sf3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_AVX)))
#define HAVE_andv4sf3 (TARGET_SSE && 1)
#define HAVE_andv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && TARGET_AVX512VL))
#define HAVE_iorv4sf3 (TARGET_SSE && 1)
#define HAVE_iorv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && TARGET_AVX512VL))
#define HAVE_xorv4sf3 (TARGET_SSE && 1)
#define HAVE_xorv4sf3_mask ((TARGET_AVX512F) && (TARGET_SSE && TARGET_AVX512VL))
#define HAVE_andv4df3 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_andv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_AVX)))
#define HAVE_iorv4df3 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_iorv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_AVX)))
#define HAVE_xorv4df3 ((TARGET_SSE && 1) && (TARGET_AVX))
#define HAVE_xorv4df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_AVX)))
#define HAVE_andv2df3 ((TARGET_SSE && 1) && (TARGET_SSE2))
#define HAVE_andv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_SSE2)))
#define HAVE_iorv2df3 ((TARGET_SSE && 1) && (TARGET_SSE2))
#define HAVE_iorv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_SSE2)))
#define HAVE_xorv2df3 ((TARGET_SSE && 1) && (TARGET_SSE2))
#define HAVE_xorv2df3_mask ((TARGET_AVX512F) && ((TARGET_SSE && TARGET_AVX512VL) && (TARGET_SSE2)))
#define HAVE_andv16sf3 (TARGET_AVX512F)
#define HAVE_andv16sf3_mask (TARGET_AVX512F)
#define HAVE_iorv16sf3 (TARGET_AVX512F)
#define HAVE_iorv16sf3_mask (TARGET_AVX512F)
#define HAVE_xorv16sf3 (TARGET_AVX512F)
#define HAVE_xorv16sf3_mask (TARGET_AVX512F)
#define HAVE_andv8df3 (TARGET_AVX512F)
#define HAVE_andv8df3_mask (TARGET_AVX512F)
#define HAVE_iorv8df3 (TARGET_AVX512F)
#define HAVE_iorv8df3_mask (TARGET_AVX512F)
#define HAVE_xorv8df3 (TARGET_AVX512F)
#define HAVE_xorv8df3_mask (TARGET_AVX512F)
#define HAVE_copysignv16sf3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_copysignv8sf3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_copysignv4sf3 (TARGET_SSE)
#define HAVE_copysignv8df3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_copysignv4df3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_copysignv2df3 ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_andtf3 (TARGET_SSE)
#define HAVE_iortf3 (TARGET_SSE)
#define HAVE_xortf3 (TARGET_SSE)
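/* Fused multiply-add family, named after the middle-end optabs:
   fma = a*b + c, fms = a*b - c, fnma = -(a*b) + c, and
   fnms = -(a*b) - c.  Any of FMA3 (TARGET_FMA), FMA4, or the
   AVX-512 extensions supplies the instructions; the "maskz"
   expanders are the AVX-512 zero-masking forms.  */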
#define HAVE_fmasf4 (TARGET_SSE_MATH && (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F))
#define HAVE_fmadf4 (TARGET_SSE_MATH && (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F))
#define HAVE_fmav4sf4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fmav2df4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fmav8sf4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fmav4df4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fmav16sf4 (TARGET_AVX512F)
#define HAVE_fmav8df4 (TARGET_AVX512F)
#define HAVE_fmssf4 (TARGET_SSE_MATH && (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F))
#define HAVE_fmsdf4 (TARGET_SSE_MATH && (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F))
#define HAVE_fmsv4sf4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fmsv2df4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fmsv8sf4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fmsv4df4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fmsv16sf4 (TARGET_AVX512F)
#define HAVE_fmsv8df4 (TARGET_AVX512F)
#define HAVE_fnmasf4 (TARGET_SSE_MATH && (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F))
#define HAVE_fnmadf4 (TARGET_SSE_MATH && (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F))
#define HAVE_fnmav4sf4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fnmav2df4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fnmav8sf4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fnmav4df4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fnmav16sf4 (TARGET_AVX512F)
#define HAVE_fnmav8df4 (TARGET_AVX512F)
#define HAVE_fnmssf4 (TARGET_SSE_MATH && (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F))
#define HAVE_fnmsdf4 (TARGET_SSE_MATH && (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F))
#define HAVE_fnmsv4sf4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fnmsv2df4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fnmsv8sf4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fnmsv4df4 (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fnmsv16sf4 (TARGET_AVX512F)
#define HAVE_fnmsv8df4 (TARGET_AVX512F)
#define HAVE_fma4i_fmadd_sf (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F)
#define HAVE_fma4i_fmadd_df (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F)
#define HAVE_fma4i_fmadd_v4sf (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fma4i_fmadd_v2df (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fma4i_fmadd_v8sf (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fma4i_fmadd_v4df (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512VL)
#define HAVE_fma4i_fmadd_v16sf (TARGET_AVX512F)
#define HAVE_fma4i_fmadd_v8df (TARGET_AVX512F)
#define HAVE_avx512f_fmadd_v16sf_maskz (TARGET_AVX512F && 1)
#define HAVE_avx512f_fmadd_v16sf_maskz_round ((TARGET_AVX512F) && (TARGET_AVX512F && 1))
#define HAVE_avx512vl_fmadd_v8sf_maskz ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v8sf_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmadd_v4sf_maskz ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v4sf_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmadd_v8df_maskz (TARGET_AVX512F && 1)
#define HAVE_avx512f_fmadd_v8df_maskz_round ((TARGET_AVX512F) && (TARGET_AVX512F && 1))
#define HAVE_avx512vl_fmadd_v4df_maskz ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v4df_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmadd_v2df_maskz ((TARGET_AVX512F && 1) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmadd_v2df_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F && 1) && (TARGET_AVX512VL)))
#define HAVE_fmaddsub_v16sf ((TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F) && (TARGET_AVX512F))
#define HAVE_fmaddsub_v8sf ((TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F) && (TARGET_AVX))
#define HAVE_fmaddsub_v4sf (TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F)
#define HAVE_fmaddsub_v8df ((TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F) && (TARGET_AVX512F))
#define HAVE_fmaddsub_v4df ((TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F) && (TARGET_AVX))
#define HAVE_fmaddsub_v2df ((TARGET_FMA || TARGET_FMA4 || TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_fmaddsub_v16sf_maskz (TARGET_AVX512F)
#define HAVE_avx512f_fmaddsub_v16sf_maskz_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmaddsub_v8sf_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v8sf_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmaddsub_v4sf_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v4sf_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fmaddsub_v8df_maskz (TARGET_AVX512F)
#define HAVE_avx512f_fmaddsub_v8df_maskz_round (TARGET_AVX512F)
#define HAVE_avx512vl_fmaddsub_v4df_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v4df_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fmaddsub_v2df_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fmaddsub_v2df_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_fmai_vmfmadd_v4sf (TARGET_FMA)
#define HAVE_fmai_vmfmadd_v4sf_round ((TARGET_AVX512F) && (TARGET_FMA))
#define HAVE_fmai_vmfmadd_v2df ((TARGET_FMA) && (TARGET_SSE2))
#define HAVE_fmai_vmfmadd_v2df_round ((TARGET_AVX512F) && ((TARGET_FMA) && (TARGET_SSE2)))
#define HAVE_fma4i_vmfmadd_v4sf (TARGET_FMA4)
#define HAVE_fma4i_vmfmadd_v2df ((TARGET_FMA4) && (TARGET_SSE2))
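/* Vector int<->FP conversions: floatuns<int><flt>2 converts unsigned
   integers to floating point and fixuns_trunc<flt><int>2 goes back
   with truncation.  HAVE_floatunsv4siv4sf2 reduces to TARGET_SSE2
   because its "V4SFmode == V4SFmode" alternative folds true; the
   wider modes need TARGET_AVX2 plus the width ISA as well.  */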
#define HAVE_floatunsv16siv16sf2 ((TARGET_SSE2 && (V16SFmode == V4SFmode || TARGET_AVX2)) && (TARGET_AVX512F))
#define HAVE_floatunsv8siv8sf2 ((TARGET_SSE2 && (V8SFmode == V4SFmode || TARGET_AVX2)) && (TARGET_AVX))
#define HAVE_floatunsv4siv4sf2 (TARGET_SSE2 && (V4SFmode == V4SFmode || TARGET_AVX2))
#define HAVE_fixuns_truncv16sfv16si2 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_fixuns_truncv8sfv8si2 ((TARGET_SSE2) && (TARGET_AVX))
#define HAVE_fixuns_truncv4sfv4si2 (TARGET_SSE2)
#define HAVE_avx_cvtpd2dq256_2 (TARGET_AVX)
#define HAVE_avx_cvttpd2dq256_2 (TARGET_AVX)
#define HAVE_sse2_cvtpd2ps (TARGET_SSE2)
#define HAVE_sse2_cvtpd2ps_mask (TARGET_SSE2)
#define HAVE_avx512bw_cvtmask2bv64qi (TARGET_AVX512BW)
#define HAVE_avx512vl_cvtmask2bv16qi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cvtmask2bv32qi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_cvtmask2wv32hi (TARGET_AVX512BW)
#define HAVE_avx512vl_cvtmask2wv16hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cvtmask2wv8hi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512f_cvtmask2dv16si (TARGET_AVX512DQ)
#define HAVE_avx512vl_cvtmask2dv8si ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cvtmask2dv4si ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_avx512f_cvtmask2qv8di (TARGET_AVX512DQ)
#define HAVE_avx512vl_cvtmask2qv4di ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
#define HAVE_avx512vl_cvtmask2qv2di ((TARGET_AVX512DQ) && (TARGET_AVX512VL))
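/* Width-changing conversions: vec_unpacks_{lo,hi}_* widen one half
   of a vector to double-width elements (the *_float/*u_float forms
   also convert integer halves to floating point), while the
   vec_pack_* expanders narrow two source vectors into one result,
   truncating or converting to signed/unsigned integers along the
   way.  */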
#define HAVE_vec_unpacks_hi_v4sf (TARGET_SSE2)
#define HAVE_vec_unpacks_hi_v8sf (TARGET_AVX)
#define HAVE_vec_unpacks_hi_v16sf (TARGET_AVX512F)
#define HAVE_vec_unpacks_lo_v4sf (TARGET_SSE2)
#define HAVE_vec_unpacks_lo_v8sf (TARGET_AVX)
#define HAVE_vec_unpacks_float_hi_v32hi ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacks_float_hi_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacks_float_hi_v8hi (TARGET_SSE2)
#define HAVE_vec_unpacks_float_lo_v32hi ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacks_float_lo_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacks_float_lo_v8hi (TARGET_SSE2)
#define HAVE_vec_unpacku_float_hi_v32hi ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacku_float_hi_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacku_float_hi_v8hi (TARGET_SSE2)
#define HAVE_vec_unpacku_float_lo_v32hi ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacku_float_lo_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacku_float_lo_v8hi (TARGET_SSE2)
#define HAVE_vec_unpacks_float_hi_v4si (TARGET_SSE2)
#define HAVE_vec_unpacks_float_lo_v4si (TARGET_SSE2)
#define HAVE_vec_unpacks_float_hi_v8si (TARGET_AVX)
#define HAVE_vec_unpacks_float_lo_v8si (TARGET_AVX)
#define HAVE_vec_unpacks_float_hi_v16si (TARGET_AVX512F)
#define HAVE_vec_unpacks_float_lo_v16si (TARGET_AVX512F)
#define HAVE_vec_unpacku_float_hi_v4si (TARGET_SSE2)
#define HAVE_vec_unpacku_float_lo_v4si (TARGET_SSE2)
#define HAVE_vec_unpacku_float_hi_v8si (TARGET_AVX)
#define HAVE_vec_unpacku_float_hi_v16si (TARGET_AVX512F)
#define HAVE_vec_unpacku_float_lo_v8si (TARGET_AVX)
#define HAVE_vec_unpacku_float_lo_v16si (TARGET_AVX512F)
#define HAVE_vec_pack_trunc_v8df ((TARGET_AVX) && (TARGET_AVX512F))
#define HAVE_vec_pack_trunc_v4df (TARGET_AVX)
#define HAVE_vec_pack_trunc_v2df (TARGET_SSE2)
#define HAVE_vec_pack_sfix_trunc_v8df (TARGET_AVX512F)
#define HAVE_vec_pack_sfix_trunc_v4df (TARGET_AVX)
#define HAVE_vec_pack_sfix_trunc_v2df (TARGET_SSE2)
#define HAVE_vec_pack_ufix_trunc_v8df ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_pack_ufix_trunc_v4df ((TARGET_SSE2) && (TARGET_AVX))
#define HAVE_vec_pack_ufix_trunc_v2df (TARGET_SSE2)
#define HAVE_avx512f_vec_pack_sfix_v8df (TARGET_AVX512F)
#define HAVE_vec_pack_sfix_v4df (TARGET_AVX)
#define HAVE_vec_pack_sfix_v2df (TARGET_SSE2)
#define HAVE_sse_movhlps_exp (TARGET_SSE)
#define HAVE_sse_movlhps_exp (TARGET_SSE)
#define HAVE_vec_interleave_highv8sf (TARGET_AVX)
#define HAVE_vec_interleave_lowv8sf (TARGET_AVX)
#define HAVE_avx_shufps256 (TARGET_AVX)
#define HAVE_avx_shufps256_mask ((TARGET_AVX512VL) && (TARGET_AVX))
#define HAVE_sse_shufps (TARGET_SSE)
#define HAVE_sse_shufps_mask ((TARGET_AVX512VL) && (TARGET_SSE))
#define HAVE_sse_loadhps_exp (TARGET_SSE)
#define HAVE_sse_loadlps_exp (TARGET_SSE)
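/* vec_init<mode>, vec_set<mode>, and vec_extract<mode> are the
   standard named patterns the middle end uses to build a vector from
   scalars, overwrite a single element, and read a single element;
   the conditions again pair base TARGET_SSE with the flag that
   supplies the vector width.  */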
#define HAVE_vec_initv16qi (TARGET_SSE)
#define HAVE_vec_initv8hi (TARGET_SSE)
#define HAVE_vec_initv4si (TARGET_SSE)
#define HAVE_vec_initv2di (TARGET_SSE)
#define HAVE_vec_initv4sf (TARGET_SSE)
#define HAVE_vec_initv2df ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_vec_setv32qi ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_setv16qi (TARGET_SSE)
#define HAVE_vec_setv16hi ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_setv8hi (TARGET_SSE)
#define HAVE_vec_setv16si ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_vec_setv8si ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_setv4si (TARGET_SSE)
#define HAVE_vec_setv8di ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_vec_setv4di ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_setv2di (TARGET_SSE)
#define HAVE_vec_setv16sf ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_vec_setv8sf ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_setv4sf (TARGET_SSE)
#define HAVE_vec_setv8df ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_vec_setv4df ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_setv2df ((TARGET_SSE) && (TARGET_SSE2))
#define HAVE_avx512dq_vextractf64x2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_vextracti64x2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512f_vextractf32x4_mask (TARGET_AVX512F)
#define HAVE_avx512f_vextracti32x4_mask (TARGET_AVX512F)
#define HAVE_avx512dq_vextractf32x8_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_vextracti32x8_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512f_vextractf64x4_mask (TARGET_AVX512F)
#define HAVE_avx512f_vextracti64x4_mask (TARGET_AVX512F)
#define HAVE_avx512vl_vextractf128v8si (TARGET_AVX512DQ && TARGET_AVX512VL)
#define HAVE_avx512vl_vextractf128v8sf (TARGET_AVX512DQ && TARGET_AVX512VL)
#define HAVE_avx512vl_vextractf128v4di (TARGET_AVX512DQ && TARGET_AVX512VL)
#define HAVE_avx512vl_vextractf128v4df (TARGET_AVX512DQ && TARGET_AVX512VL)
#define HAVE_avx_vextractf128v32qi (TARGET_AVX)
#define HAVE_avx_vextractf128v16hi (TARGET_AVX)
#define HAVE_avx_vextractf128v8si (TARGET_AVX)
#define HAVE_avx_vextractf128v4di (TARGET_AVX)
#define HAVE_avx_vextractf128v8sf (TARGET_AVX)
#define HAVE_avx_vextractf128v4df (TARGET_AVX)
#define HAVE_vec_extractv64qi ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_vec_extractv32qi ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_extractv16qi (TARGET_SSE)
#define HAVE_vec_extractv32hi ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_vec_extractv16hi ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_extractv8hi (TARGET_SSE)
#define HAVE_vec_extractv16si ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_vec_extractv8si ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_extractv4si (TARGET_SSE)
#define HAVE_vec_extractv8di ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_vec_extractv4di ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_extractv2di (TARGET_SSE)
#define HAVE_vec_extractv16sf ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_vec_extractv8sf ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_extractv4sf (TARGET_SSE)
#define HAVE_vec_extractv8df ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_vec_extractv4df ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_vec_extractv2df (TARGET_SSE)
#define HAVE_vec_interleave_highv4df (TARGET_AVX)
#define HAVE_vec_interleave_highv2df (TARGET_SSE2)
#define HAVE_avx512f_movddup512 (TARGET_AVX512F)
#define HAVE_avx512f_movddup512_mask (TARGET_AVX512F)
#define HAVE_avx512f_unpcklpd512 (TARGET_AVX512F)
#define HAVE_avx512f_unpcklpd512_mask (TARGET_AVX512F)
#define HAVE_avx_movddup256 (TARGET_AVX && 1)
#define HAVE_avx_movddup256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
#define HAVE_avx_unpcklpd256 (TARGET_AVX && 1)
#define HAVE_avx_unpcklpd256_mask ((TARGET_AVX512F) && (TARGET_AVX && TARGET_AVX512VL))
#define HAVE_vec_interleave_lowv4df (TARGET_AVX)
#define HAVE_vec_interleave_lowv2df (TARGET_SSE2)
#define HAVE_avx512f_vternlogv16si_maskz (TARGET_AVX512F)
#define HAVE_avx512vl_vternlogv8si_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vternlogv4si_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_vternlogv8di_maskz (TARGET_AVX512F)
#define HAVE_avx512vl_vternlogv4di_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vternlogv2di_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_shufps512_mask (TARGET_AVX512F)
#define HAVE_avx512f_fixupimmv16sf_maskz (TARGET_AVX512F)
#define HAVE_avx512f_fixupimmv16sf_maskz_round (TARGET_AVX512F)
#define HAVE_avx512vl_fixupimmv8sf_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv8sf_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fixupimmv4sf_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv4sf_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_fixupimmv8df_maskz (TARGET_AVX512F)
#define HAVE_avx512f_fixupimmv8df_maskz_round (TARGET_AVX512F)
#define HAVE_avx512vl_fixupimmv4df_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv4df_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_fixupimmv2df_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_fixupimmv2df_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_sfixupimmv4sf_maskz (TARGET_AVX512F)
#define HAVE_avx512f_sfixupimmv4sf_maskz_round (TARGET_AVX512F)
#define HAVE_avx512f_sfixupimmv2df_maskz ((TARGET_AVX512F) && (TARGET_SSE2))
#define HAVE_avx512f_sfixupimmv2df_maskz_round ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_SSE2)))
#define HAVE_avx512f_shufpd512_mask (TARGET_AVX512F)
#define HAVE_avx_shufpd256 (TARGET_AVX)
#define HAVE_avx_shufpd256_mask ((TARGET_AVX512VL) && (TARGET_AVX))
#define HAVE_sse2_shufpd (TARGET_SSE2)
#define HAVE_sse2_shufpd_mask ((TARGET_AVX512VL) && (TARGET_SSE2))
#define HAVE_sse2_loadhpd_exp (TARGET_SSE2)
#define HAVE_sse2_loadlpd_exp (TARGET_SSE2)
#define HAVE_avx512f_ss_truncatev16siv16qi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_truncatev16siv16qi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_us_truncatev16siv16qi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_ss_truncatev16siv16hi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_truncatev16siv16hi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_us_truncatev16siv16hi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_ss_truncatev8div8si2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_truncatev8div8si2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_us_truncatev8div8si2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_ss_truncatev8div8hi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_truncatev8div8hi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512f_us_truncatev8div8hi2_mask_store (TARGET_AVX512F)
#define HAVE_avx512bw_ss_truncatev32hiv32qi2_mask_store (TARGET_AVX512BW)
#define HAVE_avx512bw_truncatev32hiv32qi2_mask_store (TARGET_AVX512BW)
#define HAVE_avx512bw_us_truncatev32hiv32qi2_mask_store (TARGET_AVX512BW)
#define HAVE_avx512vl_ss_truncatev4div4si2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev4div4si2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev4div4si2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev8siv8hi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_truncatev8siv8hi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_us_truncatev8siv8hi2_mask_store (TARGET_AVX512VL)
#define HAVE_avx512vl_ss_truncatev16hiv16qi2_mask_store ((TARGET_AVX512VL) && (TARGET_AVX512BW))
#define HAVE_avx512vl_truncatev16hiv16qi2_mask_store ((TARGET_AVX512VL) && (TARGET_AVX512BW))
#define HAVE_avx512vl_us_truncatev16hiv16qi2_mask_store ((TARGET_AVX512VL) && (TARGET_AVX512BW))
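/* Integer negation, addition, and subtraction across the QI/HI/SI/DI
   vector widths, followed by their AVX-512 write-masked "_mask"
   counterparts: 512-bit byte/word forms need TARGET_AVX512BW,
   512-bit dword/qword forms TARGET_AVX512F, and the narrower masked
   forms TARGET_AVX512VL on top.  */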
#define HAVE_negv64qi2 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_negv32qi2 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_negv16qi2 (TARGET_SSE2)
#define HAVE_negv32hi2 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_negv16hi2 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_negv8hi2 (TARGET_SSE2)
#define HAVE_negv16si2 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_negv8si2 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_negv4si2 (TARGET_SSE2)
#define HAVE_negv8di2 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_negv4di2 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_negv2di2 (TARGET_SSE2)
#define HAVE_addv64qi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_subv64qi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_addv32qi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_subv32qi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_addv16qi3 (TARGET_SSE2)
#define HAVE_subv16qi3 (TARGET_SSE2)
#define HAVE_addv32hi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_subv32hi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_addv16hi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_subv16hi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_addv8hi3 (TARGET_SSE2)
#define HAVE_subv8hi3 (TARGET_SSE2)
#define HAVE_addv16si3 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_subv16si3 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_addv8si3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_subv8si3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_addv4si3 (TARGET_SSE2)
#define HAVE_subv4si3 (TARGET_SSE2)
#define HAVE_addv8di3 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_subv8di3 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_addv4di3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_subv4di3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_addv2di3 (TARGET_SSE2)
#define HAVE_subv2di3 (TARGET_SSE2)
#define HAVE_addv16si3_mask (TARGET_AVX512F)
#define HAVE_subv16si3_mask (TARGET_AVX512F)
#define HAVE_addv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_subv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_addv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_subv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_addv8di3_mask (TARGET_AVX512F)
#define HAVE_subv8di3_mask (TARGET_AVX512F)
#define HAVE_addv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_subv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_addv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_subv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_addv64qi3_mask (TARGET_AVX512BW)
#define HAVE_subv64qi3_mask (TARGET_AVX512BW)
#define HAVE_addv16qi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_subv16qi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_addv32qi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_subv32qi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_addv32hi3_mask (TARGET_AVX512BW)
#define HAVE_subv32hi3_mask (TARGET_AVX512BW)
#define HAVE_addv16hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_subv16hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_addv8hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_subv8hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
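/* Saturating arithmetic: ssadd/sssub are the signed saturating
   add/subtract expanders and usadd/ussub the unsigned ones, mapping
   to PADDSB/PSUBSB, PADDUSB/PSUBUSB, and their wider forms.  */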
#define HAVE_avx512bw_ssaddv64qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_ssaddv64qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx512bw_usaddv64qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_usaddv64qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx512bw_sssubv64qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_sssubv64qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx512bw_ussubv64qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_ussubv64qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx2_ssaddv32qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_ssaddv32qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_avx2_usaddv32qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_usaddv32qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_avx2_sssubv32qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_sssubv32qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_avx2_ussubv32qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_ussubv32qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_sse2_ssaddv16qi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_ssaddv16qi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_sse2_usaddv16qi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_usaddv16qi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_sse2_sssubv16qi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_sssubv16qi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_sse2_ussubv16qi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_ussubv16qi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_avx512bw_ssaddv32hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_ssaddv32hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx512bw_usaddv32hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_usaddv32hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx512bw_sssubv32hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_sssubv32hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx512bw_ussubv32hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_ussubv32hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx2_ssaddv16hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_ssaddv16hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_avx2_usaddv16hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_usaddv16hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_avx2_sssubv16hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_sssubv16hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_avx2_ussubv16hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_ussubv16hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_sse2_ssaddv8hi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_ssaddv8hi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_sse2_usaddv8hi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_usaddv8hi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_sse2_sssubv8hi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_sssubv8hi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_sse2_ussubv8hi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_ussubv8hi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_mulv64qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_mulv64qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_mulv32qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_mulv32qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_mulv16qi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_mulv16qi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_mulv32hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_mulv32hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_mulv16hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_mulv16hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_mulv8hi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_mulv8hi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_smulv32hi3_highpart ((TARGET_SSE2 \
&& 1 && 1) && (TARGET_AVX512BW))
#define HAVE_smulv32hi3_highpart_mask ((TARGET_AVX512F) && ((TARGET_SSE2 \
&& (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_umulv32hi3_highpart ((TARGET_SSE2 \
&& 1 && 1) && (TARGET_AVX512BW))
#define HAVE_umulv32hi3_highpart_mask ((TARGET_AVX512F) && ((TARGET_SSE2 \
&& (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_smulv16hi3_highpart ((TARGET_SSE2 \
&& 1 && 1) && (TARGET_AVX2))
#define HAVE_smulv16hi3_highpart_mask ((TARGET_AVX512F) && ((TARGET_SSE2 \
&& (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_umulv16hi3_highpart ((TARGET_SSE2 \
&& 1 && 1) && (TARGET_AVX2))
#define HAVE_umulv16hi3_highpart_mask ((TARGET_AVX512F) && ((TARGET_SSE2 \
&& (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_smulv8hi3_highpart (TARGET_SSE2 \
&& 1 && 1)
#define HAVE_smulv8hi3_highpart_mask ((TARGET_AVX512F) && (TARGET_SSE2 \
&& (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_umulv8hi3_highpart (TARGET_SSE2 \
&& 1 && 1)
#define HAVE_umulv8hi3_highpart_mask ((TARGET_AVX512F) && (TARGET_SSE2 \
&& (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
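/* The *mulv*hi3_highpart patterns above return the upper 16 bits of
   each 16x16 product.  Below, vec_widen_{s,u}mult_{even,odd,hi,lo}_*
   produce double-width products of selected source elements, the
   building blocks the vectorizer composes into full widening
   multiplies.  */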
#define HAVE_vec_widen_umult_even_v16si (TARGET_AVX512F)
#define HAVE_vec_widen_umult_even_v16si_mask (TARGET_AVX512F)
#define HAVE_vec_widen_umult_even_v8si (TARGET_AVX2 && 1)
#define HAVE_vec_widen_umult_even_v8si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_vec_widen_umult_even_v4si (TARGET_SSE2 && 1)
#define HAVE_vec_widen_umult_even_v4si_mask ((TARGET_AVX512F) && (TARGET_SSE2 && TARGET_AVX512VL))
#define HAVE_vec_widen_smult_even_v16si (TARGET_AVX512F)
#define HAVE_vec_widen_smult_even_v16si_mask (TARGET_AVX512F)
#define HAVE_vec_widen_smult_even_v8si (TARGET_AVX2 && 1)
#define HAVE_vec_widen_smult_even_v8si_mask ((TARGET_AVX512F) && (TARGET_AVX2 && TARGET_AVX512VL))
#define HAVE_sse4_1_mulv2siv2di3 (TARGET_SSE4_1 && 1)
#define HAVE_sse4_1_mulv2siv2di3_mask ((TARGET_AVX512F) && (TARGET_SSE4_1 && TARGET_AVX512VL))
#define HAVE_avx2_pmaddwd (TARGET_AVX2)
#define HAVE_sse2_pmaddwd (TARGET_SSE2)
#define HAVE_mulv16si3 ((TARGET_SSE2 && 1) && (TARGET_AVX512F))
#define HAVE_mulv16si3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_mulv8si3 ((TARGET_SSE2 && 1) && (TARGET_AVX2))
#define HAVE_mulv8si3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX2)))
#define HAVE_mulv4si3 (TARGET_SSE2 && 1)
#define HAVE_mulv4si3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_mulv8di3 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_mulv4di3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_mulv2di3 (TARGET_SSE2)
#define HAVE_vec_widen_smult_hi_v32qi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_widen_umult_hi_v32qi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_widen_smult_hi_v16qi (TARGET_SSE2)
#define HAVE_vec_widen_umult_hi_v16qi (TARGET_SSE2)
#define HAVE_vec_widen_smult_hi_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_widen_umult_hi_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_widen_smult_hi_v8hi (TARGET_SSE2)
#define HAVE_vec_widen_umult_hi_v8hi (TARGET_SSE2)
#define HAVE_vec_widen_smult_hi_v8si ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_widen_umult_hi_v8si ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_widen_smult_hi_v4si (TARGET_SSE2)
#define HAVE_vec_widen_umult_hi_v4si (TARGET_SSE2)
#define HAVE_vec_widen_smult_lo_v32qi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_widen_umult_lo_v32qi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_widen_smult_lo_v16qi (TARGET_SSE2)
#define HAVE_vec_widen_umult_lo_v16qi (TARGET_SSE2)
#define HAVE_vec_widen_smult_lo_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_widen_umult_lo_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_widen_smult_lo_v8hi (TARGET_SSE2)
#define HAVE_vec_widen_umult_lo_v8hi (TARGET_SSE2)
#define HAVE_vec_widen_smult_lo_v8si ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_widen_umult_lo_v8si ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_widen_smult_lo_v4si (TARGET_SSE2)
#define HAVE_vec_widen_umult_lo_v4si (TARGET_SSE2)
#define HAVE_vec_widen_smult_even_v4si (TARGET_SSE2)
#define HAVE_vec_widen_smult_odd_v16si ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_widen_umult_odd_v16si ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_widen_smult_odd_v8si ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_widen_umult_odd_v8si ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_widen_smult_odd_v4si (TARGET_SSE2)
#define HAVE_vec_widen_umult_odd_v4si (TARGET_SSE2)
#define HAVE_sdot_prodv32hi ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_sdot_prodv16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_sdot_prodv8hi (TARGET_SSE2)
#define HAVE_sdot_prodv4si (TARGET_XOP)
#define HAVE_usadv16qi (TARGET_SSE2)
#define HAVE_usadv32qi (TARGET_AVX2)
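/* vec_shl_<mode> and vec_shr_<mode> shift the vector as a whole by a
   bit count (whole-vector shifts, used for example in reduction
   epilogues), not each element individually.  */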
#define HAVE_vec_shl_v16qi (TARGET_SSE2)
#define HAVE_vec_shl_v8hi (TARGET_SSE2)
#define HAVE_vec_shl_v4si (TARGET_SSE2)
#define HAVE_vec_shl_v2di (TARGET_SSE2)
#define HAVE_vec_shr_v16qi (TARGET_SSE2)
#define HAVE_vec_shr_v8hi (TARGET_SSE2)
#define HAVE_vec_shr_v4si (TARGET_SSE2)
#define HAVE_vec_shr_v2di (TARGET_SSE2)
#define HAVE_smaxv32qi3 (TARGET_AVX2)
#define HAVE_sminv32qi3 (TARGET_AVX2)
#define HAVE_umaxv32qi3 (TARGET_AVX2)
#define HAVE_uminv32qi3 (TARGET_AVX2)
#define HAVE_smaxv16hi3 (TARGET_AVX2)
#define HAVE_sminv16hi3 (TARGET_AVX2)
#define HAVE_umaxv16hi3 (TARGET_AVX2)
#define HAVE_uminv16hi3 (TARGET_AVX2)
#define HAVE_smaxv8si3 (TARGET_AVX2)
#define HAVE_sminv8si3 (TARGET_AVX2)
#define HAVE_umaxv8si3 (TARGET_AVX2)
#define HAVE_uminv8si3 (TARGET_AVX2)
#define HAVE_smaxv64qi3 ((TARGET_AVX2) && (TARGET_AVX512BW))
#define HAVE_sminv64qi3 ((TARGET_AVX2) && (TARGET_AVX512BW))
#define HAVE_umaxv64qi3 ((TARGET_AVX2) && (TARGET_AVX512BW))
#define HAVE_uminv64qi3 ((TARGET_AVX2) && (TARGET_AVX512BW))
#define HAVE_smaxv32hi3 ((TARGET_AVX2) && (TARGET_AVX512BW))
#define HAVE_sminv32hi3 ((TARGET_AVX2) && (TARGET_AVX512BW))
#define HAVE_umaxv32hi3 ((TARGET_AVX2) && (TARGET_AVX512BW))
#define HAVE_uminv32hi3 ((TARGET_AVX2) && (TARGET_AVX512BW))
#define HAVE_smaxv16si3 ((TARGET_AVX2) && (TARGET_AVX512F))
#define HAVE_sminv16si3 ((TARGET_AVX2) && (TARGET_AVX512F))
#define HAVE_umaxv16si3 ((TARGET_AVX2) && (TARGET_AVX512F))
#define HAVE_uminv16si3 ((TARGET_AVX2) && (TARGET_AVX512F))
#define HAVE_smaxv16si3_mask (TARGET_AVX512F)
#define HAVE_sminv16si3_mask (TARGET_AVX512F)
#define HAVE_umaxv16si3_mask (TARGET_AVX512F)
#define HAVE_uminv16si3_mask (TARGET_AVX512F)
#define HAVE_smaxv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_sminv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_umaxv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_uminv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_smaxv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_sminv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_umaxv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_uminv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_smaxv8di3_mask (TARGET_AVX512F)
#define HAVE_sminv8di3_mask (TARGET_AVX512F)
#define HAVE_umaxv8di3_mask (TARGET_AVX512F)
#define HAVE_uminv8di3_mask (TARGET_AVX512F)
#define HAVE_smaxv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_sminv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_umaxv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_uminv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_smaxv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_sminv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_umaxv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_uminv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_smaxv8di3 ((TARGET_SSE4_2) && (TARGET_AVX512F))
#define HAVE_sminv8di3 ((TARGET_SSE4_2) && (TARGET_AVX512F))
#define HAVE_umaxv8di3 ((TARGET_SSE4_2) && (TARGET_AVX512F))
#define HAVE_uminv8di3 ((TARGET_SSE4_2) && (TARGET_AVX512F))
#define HAVE_smaxv4di3 ((TARGET_SSE4_2) && (TARGET_AVX2))
#define HAVE_sminv4di3 ((TARGET_SSE4_2) && (TARGET_AVX2))
#define HAVE_umaxv4di3 ((TARGET_SSE4_2) && (TARGET_AVX2))
#define HAVE_uminv4di3 ((TARGET_SSE4_2) && (TARGET_AVX2))
#define HAVE_smaxv2di3 (TARGET_SSE4_2)
#define HAVE_sminv2di3 (TARGET_SSE4_2)
#define HAVE_umaxv2di3 (TARGET_SSE4_2)
#define HAVE_uminv2di3 (TARGET_SSE4_2)
#define HAVE_smaxv16qi3 (TARGET_SSE2)
#define HAVE_sminv16qi3 (TARGET_SSE2)
#define HAVE_smaxv8hi3 (TARGET_SSE2)
#define HAVE_sminv8hi3 (TARGET_SSE2)
#define HAVE_smaxv4si3 (TARGET_SSE2)
#define HAVE_sminv4si3 (TARGET_SSE2)
#define HAVE_umaxv16qi3 (TARGET_SSE2)
#define HAVE_uminv16qi3 (TARGET_SSE2)
#define HAVE_umaxv8hi3 (TARGET_SSE2)
#define HAVE_uminv8hi3 (TARGET_SSE2)
#define HAVE_umaxv4si3 (TARGET_SSE2)
#define HAVE_uminv4si3 (TARGET_SSE2)
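/* Direct equality comparisons: the avx2_eq*/sse2_eq* patterns yield
   full-width vector masks (the SSE2 forms are suppressed when
   TARGET_XOP provides its own comparisons), while the avx512*_eq*
   patterns target mask registers, with "_mask" variants additionally
   predicated on an input mask.  */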
#define HAVE_avx2_eqv32qi3 (TARGET_AVX2)
#define HAVE_avx2_eqv16hi3 (TARGET_AVX2)
#define HAVE_avx2_eqv8si3 (TARGET_AVX2)
#define HAVE_avx2_eqv4di3 (TARGET_AVX2)
#define HAVE_avx512bw_eqv64qi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_eqv64qi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_eqv16qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_eqv16qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_eqv32qi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_eqv32qi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512bw_eqv32hi3 (TARGET_AVX512BW)
#define HAVE_avx512bw_eqv32hi3_mask ((TARGET_AVX512F) && (TARGET_AVX512BW))
#define HAVE_avx512vl_eqv16hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_eqv16hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_eqv8hi3 ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_eqv8hi3_mask ((TARGET_AVX512F) && ((TARGET_AVX512BW) && (TARGET_AVX512VL)))
#define HAVE_avx512f_eqv16si3 (TARGET_AVX512F)
#define HAVE_avx512f_eqv16si3_mask (TARGET_AVX512F)
#define HAVE_avx512vl_eqv8si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_eqv8si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_eqv4si3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_eqv4si3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512f_eqv8di3 (TARGET_AVX512F)
#define HAVE_avx512f_eqv8di3_mask (TARGET_AVX512F)
#define HAVE_avx512vl_eqv4di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_eqv4di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_avx512vl_eqv2di3 ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_eqv2di3_mask ((TARGET_AVX512F) && ((TARGET_AVX512F) && (TARGET_AVX512VL)))
#define HAVE_sse2_eqv16qi3 (TARGET_SSE2 && !TARGET_XOP)
#define HAVE_sse2_eqv8hi3 (TARGET_SSE2 && !TARGET_XOP)
#define HAVE_sse2_eqv4si3 (TARGET_SSE2 && !TARGET_XOP)
#define HAVE_sse4_1_eqv2di3 (TARGET_SSE4_1)
#define HAVE_vcondv64qiv64qi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V64QImode) \
== GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
#define HAVE_vcondv32hiv64qi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V32HImode) \
== GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
#define HAVE_vcondv16siv64qi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SImode) \
== GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
#define HAVE_vcondv8div64qi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DImode) \
== GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
#define HAVE_vcondv16sfv64qi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SFmode) \
== GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
#define HAVE_vcondv8dfv64qi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DFmode) \
== GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
#define HAVE_vcondv64qiv32hi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V64QImode) \
== GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
#define HAVE_vcondv32hiv32hi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V32HImode) \
== GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
#define HAVE_vcondv16siv32hi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SImode) \
== GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
#define HAVE_vcondv8div32hi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DImode) \
== GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
#define HAVE_vcondv16sfv32hi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SFmode) \
== GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
#define HAVE_vcondv8dfv32hi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DFmode) \
== GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
#define HAVE_vcondv64qiv16si (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V64QImode) \
== GET_MODE_NUNITS (V16SImode)))
#define HAVE_vcondv32hiv16si (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V32HImode) \
== GET_MODE_NUNITS (V16SImode)))
#define HAVE_vcondv16siv16si (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SImode) \
== GET_MODE_NUNITS (V16SImode)))
#define HAVE_vcondv8div16si (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DImode) \
== GET_MODE_NUNITS (V16SImode)))
#define HAVE_vcondv16sfv16si (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SFmode) \
== GET_MODE_NUNITS (V16SImode)))
#define HAVE_vcondv8dfv16si (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DFmode) \
== GET_MODE_NUNITS (V16SImode)))
#define HAVE_vcondv64qiv8di (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V64QImode) \
== GET_MODE_NUNITS (V8DImode)))
#define HAVE_vcondv32hiv8di (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V32HImode) \
== GET_MODE_NUNITS (V8DImode)))
#define HAVE_vcondv16siv8di (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SImode) \
== GET_MODE_NUNITS (V8DImode)))
#define HAVE_vcondv8div8di (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DImode) \
== GET_MODE_NUNITS (V8DImode)))
#define HAVE_vcondv16sfv8di (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SFmode) \
== GET_MODE_NUNITS (V8DImode)))
#define HAVE_vcondv8dfv8di (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DFmode) \
== GET_MODE_NUNITS (V8DImode)))
#define HAVE_vcondv32qiv32qi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V32QImode) \
== GET_MODE_NUNITS (V32QImode)))
#define HAVE_vcondv16hiv32qi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V16HImode) \
== GET_MODE_NUNITS (V32QImode)))
#define HAVE_vcondv8siv32qi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V8SImode) \
== GET_MODE_NUNITS (V32QImode)))
#define HAVE_vcondv4div32qi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V4DImode) \
== GET_MODE_NUNITS (V32QImode)))
#define HAVE_vcondv8sfv32qi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V8SFmode) \
== GET_MODE_NUNITS (V32QImode)))
#define HAVE_vcondv4dfv32qi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V4DFmode) \
== GET_MODE_NUNITS (V32QImode)))
#define HAVE_vcondv32qiv16hi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V32QImode) \
== GET_MODE_NUNITS (V16HImode)))
#define HAVE_vcondv16hiv16hi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V16HImode) \
== GET_MODE_NUNITS (V16HImode)))
#define HAVE_vcondv8siv16hi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V8SImode) \
== GET_MODE_NUNITS (V16HImode)))
#define HAVE_vcondv4div16hi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V4DImode) \
== GET_MODE_NUNITS (V16HImode)))
#define HAVE_vcondv8sfv16hi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V8SFmode) \
== GET_MODE_NUNITS (V16HImode)))
#define HAVE_vcondv4dfv16hi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V4DFmode) \
== GET_MODE_NUNITS (V16HImode)))
#define HAVE_vcondv32qiv8si (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V32QImode) \
== GET_MODE_NUNITS (V8SImode)))
#define HAVE_vcondv16hiv8si (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V16HImode) \
== GET_MODE_NUNITS (V8SImode)))
#define HAVE_vcondv8siv8si (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V8SImode) \
== GET_MODE_NUNITS (V8SImode)))
#define HAVE_vcondv4div8si (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V4DImode) \
== GET_MODE_NUNITS (V8SImode)))
#define HAVE_vcondv8sfv8si (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V8SFmode) \
== GET_MODE_NUNITS (V8SImode)))
#define HAVE_vcondv4dfv8si (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V4DFmode) \
== GET_MODE_NUNITS (V8SImode)))
#define HAVE_vcondv32qiv4di (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V32QImode) \
== GET_MODE_NUNITS (V4DImode)))
#define HAVE_vcondv16hiv4di (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V16HImode) \
== GET_MODE_NUNITS (V4DImode)))
#define HAVE_vcondv8siv4di (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V8SImode) \
== GET_MODE_NUNITS (V4DImode)))
#define HAVE_vcondv4div4di (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V4DImode) \
== GET_MODE_NUNITS (V4DImode)))
#define HAVE_vcondv8sfv4di (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V8SFmode) \
== GET_MODE_NUNITS (V4DImode)))
#define HAVE_vcondv4dfv4di (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V4DFmode) \
== GET_MODE_NUNITS (V4DImode)))
#define HAVE_vcondv16qiv16qi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V16QImode) \
== GET_MODE_NUNITS (V16QImode)))
#define HAVE_vcondv8hiv16qi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V8HImode) \
== GET_MODE_NUNITS (V16QImode)))
#define HAVE_vcondv4siv16qi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V4SImode) \
== GET_MODE_NUNITS (V16QImode)))
#define HAVE_vcondv2div16qi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V2DImode) \
== GET_MODE_NUNITS (V16QImode)))
#define HAVE_vcondv4sfv16qi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V4SFmode) \
== GET_MODE_NUNITS (V16QImode)))
#define HAVE_vcondv2dfv16qi ((TARGET_SSE2 \
&& (GET_MODE_NUNITS (V2DFmode) \
== GET_MODE_NUNITS (V16QImode))) && (TARGET_SSE2))
#define HAVE_vcondv16qiv8hi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V16QImode) \
== GET_MODE_NUNITS (V8HImode)))
#define HAVE_vcondv8hiv8hi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V8HImode) \
== GET_MODE_NUNITS (V8HImode)))
#define HAVE_vcondv4siv8hi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V4SImode) \
== GET_MODE_NUNITS (V8HImode)))
#define HAVE_vcondv2div8hi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V2DImode) \
== GET_MODE_NUNITS (V8HImode)))
#define HAVE_vcondv4sfv8hi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V4SFmode) \
== GET_MODE_NUNITS (V8HImode)))
#define HAVE_vcondv2dfv8hi ((TARGET_SSE2 \
&& (GET_MODE_NUNITS (V2DFmode) \
== GET_MODE_NUNITS (V8HImode))) && (TARGET_SSE2))
#define HAVE_vcondv16qiv4si (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V16QImode) \
== GET_MODE_NUNITS (V4SImode)))
#define HAVE_vcondv8hiv4si (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V8HImode) \
== GET_MODE_NUNITS (V4SImode)))
#define HAVE_vcondv4siv4si (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V4SImode) \
== GET_MODE_NUNITS (V4SImode)))
#define HAVE_vcondv2div4si (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V2DImode) \
== GET_MODE_NUNITS (V4SImode)))
#define HAVE_vcondv4sfv4si (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V4SFmode) \
== GET_MODE_NUNITS (V4SImode)))
#define HAVE_vcondv2dfv4si ((TARGET_SSE2 \
&& (GET_MODE_NUNITS (V2DFmode) \
== GET_MODE_NUNITS (V4SImode))) && (TARGET_SSE2))
#define HAVE_vcondv2div2di (TARGET_SSE4_2)
#define HAVE_vcondv2dfv2di (TARGET_SSE4_2)
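/* In the vcond conditions above, the GET_MODE_NUNITS equality test
   encodes the requirement that the value mode and the comparison mode
   have the same number of elements.  Both counts are compile-time
   constants, so each condition folds to 0 or 1 when this header is
   compiled: e.g. for HAVE_vcondv8hiv16qi the test is 8 == 16, so that
   variant is never actually available.  */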
#define HAVE_vconduv64qiv64qi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V64QImode) \
== GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
#define HAVE_vconduv32hiv64qi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V32HImode) \
== GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
#define HAVE_vconduv16siv64qi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SImode) \
== GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
#define HAVE_vconduv8div64qi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DImode) \
== GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
#define HAVE_vconduv16sfv64qi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SFmode) \
== GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
#define HAVE_vconduv8dfv64qi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DFmode) \
== GET_MODE_NUNITS (V64QImode))) && (TARGET_AVX512BW))
#define HAVE_vconduv64qiv32hi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V64QImode) \
== GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
#define HAVE_vconduv32hiv32hi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V32HImode) \
== GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
#define HAVE_vconduv16siv32hi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SImode) \
== GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
#define HAVE_vconduv8div32hi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DImode) \
== GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
#define HAVE_vconduv16sfv32hi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SFmode) \
== GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
#define HAVE_vconduv8dfv32hi ((TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DFmode) \
== GET_MODE_NUNITS (V32HImode))) && (TARGET_AVX512BW))
#define HAVE_vconduv64qiv16si (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V64QImode) \
== GET_MODE_NUNITS (V16SImode)))
#define HAVE_vconduv32hiv16si (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V32HImode) \
== GET_MODE_NUNITS (V16SImode)))
#define HAVE_vconduv16siv16si (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SImode) \
== GET_MODE_NUNITS (V16SImode)))
#define HAVE_vconduv8div16si (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DImode) \
== GET_MODE_NUNITS (V16SImode)))
#define HAVE_vconduv16sfv16si (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SFmode) \
== GET_MODE_NUNITS (V16SImode)))
#define HAVE_vconduv8dfv16si (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DFmode) \
== GET_MODE_NUNITS (V16SImode)))
#define HAVE_vconduv64qiv8di (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V64QImode) \
== GET_MODE_NUNITS (V8DImode)))
#define HAVE_vconduv32hiv8di (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V32HImode) \
== GET_MODE_NUNITS (V8DImode)))
#define HAVE_vconduv16siv8di (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SImode) \
== GET_MODE_NUNITS (V8DImode)))
#define HAVE_vconduv8div8di (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DImode) \
== GET_MODE_NUNITS (V8DImode)))
#define HAVE_vconduv16sfv8di (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V16SFmode) \
== GET_MODE_NUNITS (V8DImode)))
#define HAVE_vconduv8dfv8di (TARGET_AVX512F \
&& (GET_MODE_NUNITS (V8DFmode) \
== GET_MODE_NUNITS (V8DImode)))
#define HAVE_vconduv32qiv32qi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V32QImode) \
== GET_MODE_NUNITS (V32QImode)))
#define HAVE_vconduv16hiv32qi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V16HImode) \
== GET_MODE_NUNITS (V32QImode)))
#define HAVE_vconduv8siv32qi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V8SImode) \
== GET_MODE_NUNITS (V32QImode)))
#define HAVE_vconduv4div32qi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V4DImode) \
== GET_MODE_NUNITS (V32QImode)))
#define HAVE_vconduv8sfv32qi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V8SFmode) \
== GET_MODE_NUNITS (V32QImode)))
#define HAVE_vconduv4dfv32qi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V4DFmode) \
== GET_MODE_NUNITS (V32QImode)))
#define HAVE_vconduv32qiv16hi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V32QImode) \
== GET_MODE_NUNITS (V16HImode)))
#define HAVE_vconduv16hiv16hi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V16HImode) \
== GET_MODE_NUNITS (V16HImode)))
#define HAVE_vconduv8siv16hi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V8SImode) \
== GET_MODE_NUNITS (V16HImode)))
#define HAVE_vconduv4div16hi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V4DImode) \
== GET_MODE_NUNITS (V16HImode)))
#define HAVE_vconduv8sfv16hi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V8SFmode) \
== GET_MODE_NUNITS (V16HImode)))
#define HAVE_vconduv4dfv16hi (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V4DFmode) \
== GET_MODE_NUNITS (V16HImode)))
#define HAVE_vconduv32qiv8si (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V32QImode) \
== GET_MODE_NUNITS (V8SImode)))
#define HAVE_vconduv16hiv8si (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V16HImode) \
== GET_MODE_NUNITS (V8SImode)))
#define HAVE_vconduv8siv8si (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V8SImode) \
== GET_MODE_NUNITS (V8SImode)))
#define HAVE_vconduv4div8si (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V4DImode) \
== GET_MODE_NUNITS (V8SImode)))
#define HAVE_vconduv8sfv8si (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V8SFmode) \
== GET_MODE_NUNITS (V8SImode)))
#define HAVE_vconduv4dfv8si (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V4DFmode) \
== GET_MODE_NUNITS (V8SImode)))
#define HAVE_vconduv32qiv4di (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V32QImode) \
== GET_MODE_NUNITS (V4DImode)))
#define HAVE_vconduv16hiv4di (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V16HImode) \
== GET_MODE_NUNITS (V4DImode)))
#define HAVE_vconduv8siv4di (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V8SImode) \
== GET_MODE_NUNITS (V4DImode)))
#define HAVE_vconduv4div4di (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V4DImode) \
== GET_MODE_NUNITS (V4DImode)))
#define HAVE_vconduv8sfv4di (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V8SFmode) \
== GET_MODE_NUNITS (V4DImode)))
#define HAVE_vconduv4dfv4di (TARGET_AVX2 \
&& (GET_MODE_NUNITS (V4DFmode) \
== GET_MODE_NUNITS (V4DImode)))
#define HAVE_vconduv16qiv16qi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V16QImode) \
== GET_MODE_NUNITS (V16QImode)))
#define HAVE_vconduv8hiv16qi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V8HImode) \
== GET_MODE_NUNITS (V16QImode)))
#define HAVE_vconduv4siv16qi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V4SImode) \
== GET_MODE_NUNITS (V16QImode)))
#define HAVE_vconduv2div16qi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V2DImode) \
== GET_MODE_NUNITS (V16QImode)))
#define HAVE_vconduv4sfv16qi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V4SFmode) \
== GET_MODE_NUNITS (V16QImode)))
#define HAVE_vconduv2dfv16qi ((TARGET_SSE2 \
&& (GET_MODE_NUNITS (V2DFmode) \
== GET_MODE_NUNITS (V16QImode))) && (TARGET_SSE2))
#define HAVE_vconduv16qiv8hi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V16QImode) \
== GET_MODE_NUNITS (V8HImode)))
#define HAVE_vconduv8hiv8hi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V8HImode) \
== GET_MODE_NUNITS (V8HImode)))
#define HAVE_vconduv4siv8hi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V4SImode) \
== GET_MODE_NUNITS (V8HImode)))
#define HAVE_vconduv2div8hi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V2DImode) \
== GET_MODE_NUNITS (V8HImode)))
#define HAVE_vconduv4sfv8hi (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V4SFmode) \
== GET_MODE_NUNITS (V8HImode)))
#define HAVE_vconduv2dfv8hi ((TARGET_SSE2 \
&& (GET_MODE_NUNITS (V2DFmode) \
== GET_MODE_NUNITS (V8HImode))) && (TARGET_SSE2))
#define HAVE_vconduv16qiv4si (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V16QImode) \
== GET_MODE_NUNITS (V4SImode)))
#define HAVE_vconduv8hiv4si (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V8HImode) \
== GET_MODE_NUNITS (V4SImode)))
#define HAVE_vconduv4siv4si (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V4SImode) \
== GET_MODE_NUNITS (V4SImode)))
#define HAVE_vconduv2div4si (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V2DImode) \
== GET_MODE_NUNITS (V4SImode)))
#define HAVE_vconduv4sfv4si (TARGET_SSE2 \
&& (GET_MODE_NUNITS (V4SFmode) \
== GET_MODE_NUNITS (V4SImode)))
#define HAVE_vconduv2dfv4si ((TARGET_SSE2 \
&& (GET_MODE_NUNITS (V2DFmode) \
== GET_MODE_NUNITS (V4SImode))) && (TARGET_SSE2))
#define HAVE_vconduv2div2di (TARGET_SSE4_2)
#define HAVE_vconduv2dfv2di (TARGET_SSE4_2)
#define HAVE_vec_permv16qi (TARGET_SSSE3 || TARGET_AVX || TARGET_XOP)
#define HAVE_vec_permv8hi (TARGET_SSSE3 || TARGET_AVX || TARGET_XOP)
#define HAVE_vec_permv4si (TARGET_SSSE3 || TARGET_AVX || TARGET_XOP)
#define HAVE_vec_permv2di (TARGET_SSSE3 || TARGET_AVX || TARGET_XOP)
#define HAVE_vec_permv4sf (TARGET_SSSE3 || TARGET_AVX || TARGET_XOP)
#define HAVE_vec_permv2df (TARGET_SSSE3 || TARGET_AVX || TARGET_XOP)
#define HAVE_vec_permv32qi ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX2))
#define HAVE_vec_permv16hi ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX2))
#define HAVE_vec_permv8si ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX2))
#define HAVE_vec_permv4di ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX2))
#define HAVE_vec_permv8sf ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX2))
#define HAVE_vec_permv4df ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX2))
#define HAVE_vec_permv16sf ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX512F))
#define HAVE_vec_permv8df ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX512F))
#define HAVE_vec_permv16si ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX512F))
#define HAVE_vec_permv8di ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX512F))
#define HAVE_vec_permv32hi ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX512BW))
#define HAVE_vec_permv64qi ((TARGET_SSSE3 || TARGET_AVX || TARGET_XOP) && (TARGET_AVX512VBMI))
#define HAVE_vec_perm_constv4sf (TARGET_SSE)
#define HAVE_vec_perm_constv4si (TARGET_SSE)
#define HAVE_vec_perm_constv2df (TARGET_SSE)
#define HAVE_vec_perm_constv2di (TARGET_SSE)
#define HAVE_vec_perm_constv16qi (TARGET_SSE2)
#define HAVE_vec_perm_constv8hi (TARGET_SSE2)
#define HAVE_vec_perm_constv8sf (TARGET_AVX)
#define HAVE_vec_perm_constv4df (TARGET_AVX)
#define HAVE_vec_perm_constv8si (TARGET_AVX)
#define HAVE_vec_perm_constv4di (TARGET_AVX)
#define HAVE_vec_perm_constv32qi (TARGET_AVX2)
#define HAVE_vec_perm_constv16hi (TARGET_AVX2)
#define HAVE_vec_perm_constv16si (TARGET_AVX512F)
#define HAVE_vec_perm_constv8di (TARGET_AVX512F)
#define HAVE_vec_perm_constv16sf (TARGET_AVX512F)
#define HAVE_vec_perm_constv8df (TARGET_AVX512F)
#define HAVE_vec_perm_constv32hi (TARGET_AVX512BW)
#define HAVE_vec_perm_constv64qi (TARGET_AVX512BW)
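/* The vec_perm expanders select elements from the concatenation of
   two input vectors under control of a selector vector; the
   vec_perm_const forms additionally require the selector to be a
   vector constant, letting the expander choose a fixed shuffle insn.
   A minimal sketch, following the documented operand order of the
   standard vec_perm pattern (output, two inputs, selector):

     if (HAVE_vec_permv16qi)
       emit_insn (gen_vec_permv16qi (dest, src1, src2, sel));
*/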
#define HAVE_one_cmplv16si2 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_one_cmplv8di2 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_one_cmplv64qi2 ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_one_cmplv32qi2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_one_cmplv16qi2 (TARGET_SSE)
#define HAVE_one_cmplv32hi2 ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_one_cmplv16hi2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_one_cmplv8hi2 (TARGET_SSE)
#define HAVE_one_cmplv8si2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_one_cmplv4si2 (TARGET_SSE)
#define HAVE_one_cmplv4di2 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_one_cmplv2di2 (TARGET_SSE)
#define HAVE_avx512bw_andnotv64qi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_avx2_andnotv32qi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_sse2_andnotv16qi3 (TARGET_SSE2)
#define HAVE_avx512bw_andnotv32hi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_avx2_andnotv16hi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_sse2_andnotv8hi3 (TARGET_SSE2)
#define HAVE_avx512f_andnotv16si3 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_avx2_andnotv8si3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_sse2_andnotv4si3 (TARGET_SSE2)
#define HAVE_avx512f_andnotv8di3 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_avx2_andnotv4di3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_sse2_andnotv2di3 (TARGET_SSE2)
#define HAVE_avx512f_andnotv16si3_mask (TARGET_AVX512F)
#define HAVE_avx2_andnotv8si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_sse2_andnotv4si3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_andnotv8di3_mask (TARGET_AVX512F)
#define HAVE_avx2_andnotv4di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_sse2_andnotv2di3_mask ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_andnotv64qi3_mask (TARGET_AVX512BW)
#define HAVE_sse2_andnotv16qi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx2_andnotv32qi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_andnotv32hi3_mask (TARGET_AVX512BW)
#define HAVE_avx2_andnotv16hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_sse2_andnotv8hi3_mask ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_andv16si3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_iorv16si3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_xorv16si3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_andv8di3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_iorv8di3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_xorv8di3 ((TARGET_SSE) && (TARGET_AVX512F))
#define HAVE_andv64qi3 ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_iorv64qi3 ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_xorv64qi3 ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_andv32qi3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_iorv32qi3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_xorv32qi3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_andv16qi3 (TARGET_SSE)
#define HAVE_iorv16qi3 (TARGET_SSE)
#define HAVE_xorv16qi3 (TARGET_SSE)
#define HAVE_andv32hi3 ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_iorv32hi3 ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_xorv32hi3 ((TARGET_SSE) && (TARGET_AVX512BW))
#define HAVE_andv16hi3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_iorv16hi3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_xorv16hi3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_andv8hi3 (TARGET_SSE)
#define HAVE_iorv8hi3 (TARGET_SSE)
#define HAVE_xorv8hi3 (TARGET_SSE)
#define HAVE_andv8si3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_iorv8si3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_xorv8si3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_andv4si3 (TARGET_SSE)
#define HAVE_iorv4si3 (TARGET_SSE)
#define HAVE_xorv4si3 (TARGET_SSE)
#define HAVE_andv4di3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_iorv4di3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_xorv4di3 ((TARGET_SSE) && (TARGET_AVX))
#define HAVE_andv2di3 (TARGET_SSE)
#define HAVE_iorv2di3 (TARGET_SSE)
#define HAVE_xorv2di3 (TARGET_SSE)
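/* The and/ior/xor expanders above all share one shape: the TARGET_SSE
   baseline plus whatever ISA the vector width needs (AVX for 256-bit
   modes, AVX512F or AVX512BW for 512-bit ones).  Guarded use looks
   the same at every width, e.g.:

     if (HAVE_xorv4si3)
       emit_insn (gen_xorv4si3 (dest, op0, op1));
*/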
#define HAVE_vec_pack_trunc_v32hi ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_vec_pack_trunc_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_pack_trunc_v8hi (TARGET_SSE2)
#define HAVE_vec_pack_trunc_v16si ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_vec_pack_trunc_v8si ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_pack_trunc_v4si (TARGET_SSE2)
#define HAVE_vec_pack_trunc_v8di ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_pack_trunc_v4di ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_pack_trunc_v2di (TARGET_SSE2)
#define HAVE_vec_pack_trunc_qi (TARGET_AVX512F)
#define HAVE_vec_pack_trunc_hi (TARGET_AVX512BW)
#define HAVE_vec_pack_trunc_si (TARGET_AVX512BW)
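/* vec_pack_trunc_<mode> takes two source vectors of mode <mode>,
   truncates each element to half its width and concatenates the
   results, yielding one vector with twice the element count; e.g.
   two V8HI sources pack into one V16QI destination:

     if (HAVE_vec_pack_trunc_v8hi)
       emit_insn (gen_vec_pack_trunc_v8hi (dest, src1, src2));
*/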
#define HAVE_vec_interleave_highv32qi (TARGET_AVX2)
#define HAVE_vec_interleave_highv16hi (TARGET_AVX2)
#define HAVE_vec_interleave_highv8si (TARGET_AVX2)
#define HAVE_vec_interleave_highv4di (TARGET_AVX2)
#define HAVE_vec_interleave_lowv32qi (TARGET_AVX2)
#define HAVE_vec_interleave_lowv16hi (TARGET_AVX2)
#define HAVE_vec_interleave_lowv8si (TARGET_AVX2)
#define HAVE_vec_interleave_lowv4di (TARGET_AVX2)
#define HAVE_avx512dq_vinsertf64x2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_vinserti64x2_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512f_vinsertf32x4_mask (TARGET_AVX512F)
#define HAVE_avx512f_vinserti32x4_mask (TARGET_AVX512F)
#define HAVE_avx512dq_vinsertf32x8_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512dq_vinserti32x8_mask ((TARGET_AVX512F) && (TARGET_AVX512DQ))
#define HAVE_avx512f_vinsertf64x4_mask (TARGET_AVX512F)
#define HAVE_avx512f_vinserti64x4_mask (TARGET_AVX512F)
#define HAVE_avx512dq_shuf_i64x2_mask (TARGET_AVX512DQ)
#define HAVE_avx512dq_shuf_f64x2_mask (TARGET_AVX512DQ)
#define HAVE_avx512f_shuf_f64x2_mask (TARGET_AVX512F)
#define HAVE_avx512f_shuf_i64x2_mask (TARGET_AVX512F)
#define HAVE_avx512vl_shuf_i32x4_mask (TARGET_AVX512VL)
#define HAVE_avx512vl_shuf_f32x4_mask (TARGET_AVX512VL)
#define HAVE_avx512f_shuf_f32x4_mask (TARGET_AVX512F)
#define HAVE_avx512f_shuf_i32x4_mask (TARGET_AVX512F)
#define HAVE_avx512f_pshufdv3_mask (TARGET_AVX512F)
#define HAVE_avx512vl_pshufdv3_mask (TARGET_AVX512VL)
#define HAVE_avx2_pshufdv3 (TARGET_AVX2)
#define HAVE_avx512vl_pshufd_mask (TARGET_AVX512VL)
#define HAVE_sse2_pshufd (TARGET_SSE2)
#define HAVE_avx512vl_pshuflwv3_mask (TARGET_AVX512VL && TARGET_AVX512BW)
#define HAVE_avx2_pshuflwv3 (TARGET_AVX2)
#define HAVE_avx512vl_pshuflw_mask (TARGET_AVX512VL && TARGET_AVX512BW)
#define HAVE_sse2_pshuflw (TARGET_SSE2)
#define HAVE_avx2_pshufhwv3 (TARGET_AVX2)
#define HAVE_avx512vl_pshufhwv3_mask (TARGET_AVX512VL && TARGET_AVX512BW)
#define HAVE_avx512vl_pshufhw_mask (TARGET_AVX512VL && TARGET_AVX512BW)
#define HAVE_sse2_pshufhw (TARGET_SSE2)
#define HAVE_sse2_loadd (TARGET_SSE)
#define HAVE_vec_unpacks_lo_v64qi ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_vec_unpacks_lo_v32qi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacks_lo_v16qi (TARGET_SSE2)
#define HAVE_vec_unpacks_lo_v32hi ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacks_lo_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacks_lo_v8hi (TARGET_SSE2)
#define HAVE_vec_unpacks_lo_v16si ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacks_lo_v8si ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacks_lo_v4si (TARGET_SSE2)
#define HAVE_vec_unpacks_hi_v64qi ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_vec_unpacks_hi_v32qi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacks_hi_v16qi (TARGET_SSE2)
#define HAVE_vec_unpacks_hi_v32hi ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacks_hi_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacks_hi_v8hi (TARGET_SSE2)
#define HAVE_vec_unpacks_hi_v16si ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacks_hi_v8si ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacks_hi_v4si (TARGET_SSE2)
#define HAVE_vec_unpacku_lo_v64qi ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_vec_unpacku_lo_v32qi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacku_lo_v16qi (TARGET_SSE2)
#define HAVE_vec_unpacku_lo_v32hi ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacku_lo_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacku_lo_v8hi (TARGET_SSE2)
#define HAVE_vec_unpacku_lo_v16si ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacku_lo_v8si ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacku_lo_v4si (TARGET_SSE2)
#define HAVE_vec_unpacks_lo_hi (TARGET_AVX512DQ)
#define HAVE_vec_unpacks_lo_si (TARGET_AVX512F)
#define HAVE_vec_unpacks_lo_di (TARGET_AVX512BW)
#define HAVE_vec_unpacku_hi_v64qi ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_vec_unpacku_hi_v32qi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacku_hi_v16qi (TARGET_SSE2)
#define HAVE_vec_unpacku_hi_v32hi ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacku_hi_v16hi ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacku_hi_v8hi (TARGET_SSE2)
#define HAVE_vec_unpacku_hi_v16si ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_vec_unpacku_hi_v8si ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_vec_unpacku_hi_v4si (TARGET_SSE2)
#define HAVE_vec_unpacks_hi_hi (TARGET_AVX512F)
#define HAVE_vec_unpacks_hi_si (TARGET_AVX512BW)
#define HAVE_vec_unpacks_hi_di (TARGET_AVX512BW)
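/* The vec_unpack{s,u}_{lo,hi} patterns widen the low or high half of
   the source vector, with sign (s) or zero (u) extension; the result
   has half as many elements, each twice as wide.  For instance a
   V16QI source unpacks to a V8HI destination:

     if (HAVE_vec_unpacks_lo_v16qi)
       emit_insn (gen_vec_unpacks_lo_v16qi (dest, src));
*/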
#define HAVE_avx512bw_uavgv64qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_uavgv64qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx2_uavgv32qi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_uavgv32qi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_sse2_uavgv16qi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_uavgv16qi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_avx512bw_uavgv32hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX512BW))
#define HAVE_avx512bw_uavgv32hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (64 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX512BW)))
#define HAVE_avx2_uavgv16hi3 ((TARGET_SSE2 && 1 && 1) && (TARGET_AVX2))
#define HAVE_avx2_uavgv16hi3_mask ((TARGET_AVX512F) && ((TARGET_SSE2 && (32 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW) && (TARGET_AVX2)))
#define HAVE_sse2_uavgv8hi3 (TARGET_SSE2 && 1 && 1)
#define HAVE_sse2_uavgv8hi3_mask ((TARGET_AVX512F) && (TARGET_SSE2 && (16 == 64 || TARGET_AVX512VL) && TARGET_AVX512BW))
#define HAVE_sse2_maskmovdqu (TARGET_SSE2)
#define HAVE_ssse3_pmulhrswv4hi3_mask (TARGET_AVX512BW && TARGET_AVX512VL)
#define HAVE_ssse3_pmulhrswv8hi3_mask (TARGET_AVX512BW && TARGET_AVX512VL)
#define HAVE_avx2_pmulhrswv16hi3_mask ((TARGET_AVX512BW && TARGET_AVX512VL) && (TARGET_AVX2))
#define HAVE_ssse3_pmulhrswv4hi3 (TARGET_AVX2)
#define HAVE_ssse3_pmulhrswv8hi3 (TARGET_AVX2)
#define HAVE_avx2_pmulhrswv16hi3 (TARGET_AVX2)
#define HAVE_absv64qi2 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_absv32qi2 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_absv16qi2 (TARGET_SSE2)
#define HAVE_absv32hi2 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_absv16hi2 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_absv8hi2 (TARGET_SSE2)
#define HAVE_absv16si2 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_absv8si2 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_absv4si2 (TARGET_SSE2)
#define HAVE_absv8di2 ((TARGET_SSE2) && (TARGET_AVX512F))
#define HAVE_absv4di2 ((TARGET_SSE2) && (TARGET_AVX512VL))
#define HAVE_absv2di2 ((TARGET_SSE2) && (TARGET_AVX512VL))
#define HAVE_avx2_pblendw (TARGET_AVX2)
#define HAVE_avx_roundps_sfix256 ((TARGET_ROUND) && (TARGET_AVX))
#define HAVE_sse4_1_roundps_sfix (TARGET_ROUND)
#define HAVE_avx512f_roundps512 (TARGET_AVX512F)
#define HAVE_avx512f_roundpd512 (TARGET_AVX512F)
#define HAVE_avx512f_roundps512_sfix (TARGET_AVX512F)
#define HAVE_avx512f_roundpd_vec_pack_sfix512 ((TARGET_ROUND) && (TARGET_AVX512F))
#define HAVE_avx_roundpd_vec_pack_sfix256 ((TARGET_ROUND) && (TARGET_AVX))
#define HAVE_sse4_1_roundpd_vec_pack_sfix (TARGET_ROUND)
#define HAVE_roundv16sf2 ((TARGET_ROUND && !flag_trapping_math) && (TARGET_AVX512F))
#define HAVE_roundv8sf2 ((TARGET_ROUND && !flag_trapping_math) && (TARGET_AVX))
#define HAVE_roundv4sf2 (TARGET_ROUND && !flag_trapping_math)
#define HAVE_roundv8df2 ((TARGET_ROUND && !flag_trapping_math) && (TARGET_AVX512F))
#define HAVE_roundv4df2 ((TARGET_ROUND && !flag_trapping_math) && (TARGET_AVX))
#define HAVE_roundv2df2 ((TARGET_ROUND && !flag_trapping_math) && (TARGET_SSE2))
#define HAVE_roundv16sf2_sfix ((TARGET_ROUND && !flag_trapping_math) && (TARGET_AVX512F))
#define HAVE_roundv8sf2_sfix ((TARGET_ROUND && !flag_trapping_math) && (TARGET_AVX))
#define HAVE_roundv4sf2_sfix (TARGET_ROUND && !flag_trapping_math)
#define HAVE_roundv8df2_vec_pack_sfix ((TARGET_ROUND && !flag_trapping_math) && (TARGET_AVX512F))
#define HAVE_roundv4df2_vec_pack_sfix ((TARGET_ROUND && !flag_trapping_math) && (TARGET_AVX))
#define HAVE_roundv2df2_vec_pack_sfix (TARGET_ROUND && !flag_trapping_math)
#define HAVE_avx512pf_gatherpfv16sisf (TARGET_AVX512PF)
#define HAVE_avx512pf_gatherpfv8disf (TARGET_AVX512PF)
#define HAVE_avx512pf_gatherpfv8sidf (TARGET_AVX512PF)
#define HAVE_avx512pf_gatherpfv8didf (TARGET_AVX512PF)
#define HAVE_avx512pf_scatterpfv16sisf (TARGET_AVX512PF)
#define HAVE_avx512pf_scatterpfv8disf (TARGET_AVX512PF)
#define HAVE_avx512pf_scatterpfv8sidf (TARGET_AVX512PF)
#define HAVE_avx512pf_scatterpfv8didf (TARGET_AVX512PF)
#define HAVE_rotlv16qi3 (TARGET_XOP)
#define HAVE_rotlv8hi3 (TARGET_XOP)
#define HAVE_rotlv4si3 (TARGET_XOP)
#define HAVE_rotlv2di3 (TARGET_XOP)
#define HAVE_rotrv16qi3 (TARGET_XOP)
#define HAVE_rotrv8hi3 (TARGET_XOP)
#define HAVE_rotrv4si3 (TARGET_XOP)
#define HAVE_rotrv2di3 (TARGET_XOP)
#define HAVE_vrotrv16qi3 (TARGET_XOP)
#define HAVE_vrotrv8hi3 (TARGET_XOP)
#define HAVE_vrotrv4si3 (TARGET_XOP)
#define HAVE_vrotrv2di3 (TARGET_XOP)
#define HAVE_vrotlv16qi3 (TARGET_XOP)
#define HAVE_vrotlv8hi3 (TARGET_XOP)
#define HAVE_vrotlv4si3 (TARGET_XOP)
#define HAVE_vrotlv2di3 (TARGET_XOP)
#define HAVE_vlshrv16qi3 (TARGET_XOP)
#define HAVE_vlshrv8hi3 (TARGET_XOP)
#define HAVE_vlshrv4si3 (TARGET_AVX2 || TARGET_XOP)
#define HAVE_vlshrv2di3 (TARGET_AVX2 || TARGET_XOP)
#define HAVE_vlshrv16si3 (TARGET_AVX512F)
#define HAVE_vlshrv8di3 (TARGET_AVX512F)
#define HAVE_vlshrv8si3 (TARGET_AVX2)
#define HAVE_vlshrv4di3 (TARGET_AVX2)
#define HAVE_vashrv8hi3 (TARGET_XOP || (TARGET_AVX512BW && TARGET_AVX512VL))
#define HAVE_vashrv8hi3_mask ((TARGET_AVX512F) && (TARGET_XOP || (TARGET_AVX512BW && TARGET_AVX512VL)))
#define HAVE_vashrv16qi3 (TARGET_XOP)
#define HAVE_vashrv2di3 (TARGET_XOP || TARGET_AVX512VL)
#define HAVE_vashrv2di3_mask ((TARGET_AVX512F) && (TARGET_XOP || TARGET_AVX512VL))
#define HAVE_vashrv4si3 (TARGET_AVX2 || TARGET_XOP)
#define HAVE_vashrv16si3 (TARGET_AVX512F)
#define HAVE_vashrv8si3 (TARGET_AVX2)
#define HAVE_vashlv16qi3 (TARGET_XOP)
#define HAVE_vashlv8hi3 (TARGET_XOP)
#define HAVE_vashlv4si3 (TARGET_AVX2 || TARGET_XOP)
#define HAVE_vashlv2di3 (TARGET_AVX2 || TARGET_XOP)
#define HAVE_vashlv16si3 (TARGET_AVX512F)
#define HAVE_vashlv8di3 (TARGET_AVX512F)
#define HAVE_vashlv8si3 (TARGET_AVX2)
#define HAVE_vashlv4di3 (TARGET_AVX2)
#define HAVE_ashlv64qi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_lshrv64qi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_ashrv64qi3 ((TARGET_SSE2) && (TARGET_AVX512BW))
#define HAVE_ashlv32qi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_lshrv32qi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_ashrv32qi3 ((TARGET_SSE2) && (TARGET_AVX2))
#define HAVE_ashlv16qi3 (TARGET_SSE2)
#define HAVE_lshrv16qi3 (TARGET_SSE2)
#define HAVE_ashrv16qi3 (TARGET_SSE2)
#define HAVE_ashrv2di3 (TARGET_XOP || TARGET_AVX512VL)
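/* The v-prefixed shift and rotate patterns (vashl, vlshr, vashr,
   vrotl, vrotr) shift each element by the count held in the matching
   element of the second source operand, whereas the unprefixed forms
   apply one scalar count to every element.  Sketch of the per-element
   form:

     if (HAVE_vlshrv4si3)
       emit_insn (gen_vlshrv4si3 (dest, src, count_vec));
*/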
#define HAVE_xop_vmfrczv4sf2 (TARGET_XOP)
#define HAVE_xop_vmfrczv2df2 ((TARGET_XOP) && (TARGET_SSE2))
#define HAVE_avx_vzeroall (TARGET_AVX)
#define HAVE_avx2_permv4di (TARGET_AVX2)
#define HAVE_avx2_permv4df (TARGET_AVX2)
#define HAVE_avx512f_permv8di ((TARGET_AVX2) && (TARGET_AVX512F))
#define HAVE_avx512f_permv8df ((TARGET_AVX2) && (TARGET_AVX512F))
#define HAVE_avx512vl_permv4di_mask (TARGET_AVX512F)
#define HAVE_avx512vl_permv4df_mask (TARGET_AVX512F)
#define HAVE_avx512f_permv8di_mask (TARGET_AVX512F)
#define HAVE_avx512f_permv8df_mask (TARGET_AVX512F)
#define HAVE_avx512f_vpermilv8df ((TARGET_AVX && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_vpermilv8df_mask ((TARGET_AVX512F) && ((TARGET_AVX && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx_vpermilv4df ((TARGET_AVX && 1) && (TARGET_AVX))
#define HAVE_avx_vpermilv4df_mask ((TARGET_AVX512F) && ((TARGET_AVX && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX)))
#define HAVE_avx_vpermilv2df (TARGET_AVX && 1)
#define HAVE_avx_vpermilv2df_mask ((TARGET_AVX512F) && (TARGET_AVX && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_avx512f_vpermilv16sf ((TARGET_AVX && 1) && (TARGET_AVX512F))
#define HAVE_avx512f_vpermilv16sf_mask ((TARGET_AVX512F) && ((TARGET_AVX && (64 == 64 || TARGET_AVX512VL)) && (TARGET_AVX512F)))
#define HAVE_avx_vpermilv8sf ((TARGET_AVX && 1) && (TARGET_AVX))
#define HAVE_avx_vpermilv8sf_mask ((TARGET_AVX512F) && ((TARGET_AVX && (32 == 64 || TARGET_AVX512VL)) && (TARGET_AVX)))
#define HAVE_avx_vpermilv4sf (TARGET_AVX && 1)
#define HAVE_avx_vpermilv4sf_mask ((TARGET_AVX512F) && (TARGET_AVX && (16 == 64 || TARGET_AVX512VL)))
#define HAVE_avx512f_vpermi2varv16si3_maskz (TARGET_AVX512F)
#define HAVE_avx512f_vpermi2varv16sf3_maskz (TARGET_AVX512F)
#define HAVE_avx512f_vpermi2varv8di3_maskz (TARGET_AVX512F)
#define HAVE_avx512f_vpermi2varv8df3_maskz (TARGET_AVX512F)
#define HAVE_avx512vl_vpermi2varv8si3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv8sf3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4di3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4df3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4si3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv4sf3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv2di3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv2df3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_vpermi2varv64qi3_maskz (TARGET_AVX512VBMI)
#define HAVE_avx512vl_vpermi2varv16qi3_maskz ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv32qi3_maskz ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv8hi3_maskz ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermi2varv16hi3_maskz ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_vpermi2varv32hi3_maskz (TARGET_AVX512BW)
#define HAVE_avx512f_vpermt2varv16si3_maskz (TARGET_AVX512F)
#define HAVE_avx512f_vpermt2varv16sf3_maskz (TARGET_AVX512F)
#define HAVE_avx512f_vpermt2varv8di3_maskz (TARGET_AVX512F)
#define HAVE_avx512f_vpermt2varv8df3_maskz (TARGET_AVX512F)
#define HAVE_avx512vl_vpermt2varv8si3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv8sf3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4di3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4df3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4si3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv4sf3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv2di3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv2df3_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512bw_vpermt2varv64qi3_maskz (TARGET_AVX512VBMI)
#define HAVE_avx512vl_vpermt2varv16qi3_maskz ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv32qi3_maskz ((TARGET_AVX512VBMI) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv8hi3_maskz ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512vl_vpermt2varv16hi3_maskz ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_avx512bw_vpermt2varv32hi3_maskz (TARGET_AVX512BW)
#define HAVE_avx_vperm2f128v8si3 (TARGET_AVX)
#define HAVE_avx_vperm2f128v8sf3 (TARGET_AVX)
#define HAVE_avx_vperm2f128v4df3 (TARGET_AVX)
#define HAVE_avx512vl_vinsertv8si (TARGET_AVX512VL)
#define HAVE_avx512vl_vinsertv8sf (TARGET_AVX512VL)
#define HAVE_avx512vl_vinsertv4di (TARGET_AVX512VL)
#define HAVE_avx512vl_vinsertv4df (TARGET_AVX512VL)
#define HAVE_avx_vinsertf128v32qi (TARGET_AVX)
#define HAVE_avx_vinsertf128v16hi (TARGET_AVX)
#define HAVE_avx_vinsertf128v8si (TARGET_AVX)
#define HAVE_avx_vinsertf128v4di (TARGET_AVX)
#define HAVE_avx_vinsertf128v8sf (TARGET_AVX)
#define HAVE_avx_vinsertf128v4df (TARGET_AVX)
#define HAVE_maskloadv4sfv4si (TARGET_AVX)
#define HAVE_maskloadv2dfv2di (TARGET_AVX)
#define HAVE_maskloadv8sfv8si (TARGET_AVX)
#define HAVE_maskloadv4dfv4di (TARGET_AVX)
#define HAVE_maskloadv4siv4si ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_maskloadv2div2di ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_maskloadv8siv8si ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_maskloadv4div4di ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_maskloadv16sihi (TARGET_AVX512F)
#define HAVE_maskloadv8siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskloadv4siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskloadv8diqi (TARGET_AVX512F)
#define HAVE_maskloadv4diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskloadv2diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskloadv16sfhi (TARGET_AVX512F)
#define HAVE_maskloadv8sfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskloadv4sfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskloadv8dfqi (TARGET_AVX512F)
#define HAVE_maskloadv4dfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskloadv2dfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskloadv64qidi (TARGET_AVX512BW)
#define HAVE_maskloadv16qihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_maskloadv32qisi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_maskloadv32hisi (TARGET_AVX512BW)
#define HAVE_maskloadv16hihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_maskloadv8hiqi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_maskstorev4sfv4si (TARGET_AVX)
#define HAVE_maskstorev2dfv2di (TARGET_AVX)
#define HAVE_maskstorev8sfv8si (TARGET_AVX)
#define HAVE_maskstorev4dfv4di (TARGET_AVX)
#define HAVE_maskstorev4siv4si ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_maskstorev2div2di ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_maskstorev8siv8si ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_maskstorev4div4di ((TARGET_AVX) && (TARGET_AVX2))
#define HAVE_maskstorev16sihi (TARGET_AVX512F)
#define HAVE_maskstorev8siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskstorev4siqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskstorev8diqi (TARGET_AVX512F)
#define HAVE_maskstorev4diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskstorev2diqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskstorev16sfhi (TARGET_AVX512F)
#define HAVE_maskstorev8sfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskstorev4sfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskstorev8dfqi (TARGET_AVX512F)
#define HAVE_maskstorev4dfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskstorev2dfqi ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_maskstorev64qidi (TARGET_AVX512BW)
#define HAVE_maskstorev16qihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_maskstorev32qisi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_maskstorev32hisi (TARGET_AVX512BW)
#define HAVE_maskstorev16hihi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
#define HAVE_maskstorev8hiqi ((TARGET_AVX512BW) && (TARGET_AVX512VL))
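/* The maskload/maskstore expanders implement masked vector memory
   access: lanes whose mask element is clear are left untouched (and,
   for loads, must not fault).  The two mode suffixes name the data
   mode and the mask mode.  A hedged sketch, assuming the usual
   (destination, memory, mask) operand order of the maskload optab:

     if (HAVE_maskloadv8sfv8si)
       emit_insn (gen_maskloadv8sfv8si (dest, mem, mask));
*/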
#define HAVE_cbranchv4si4 (TARGET_SSE4_1)
#define HAVE_cbranchv2di4 (TARGET_SSE4_1)
#define HAVE_cbranchv8si4 ((TARGET_SSE4_1) && (TARGET_AVX))
#define HAVE_cbranchv4di4 ((TARGET_SSE4_1) && (TARGET_AVX))
#define HAVE_vec_initv32qi (TARGET_AVX)
#define HAVE_vec_initv16hi (TARGET_AVX)
#define HAVE_vec_initv8si (TARGET_AVX)
#define HAVE_vec_initv4di (TARGET_AVX)
#define HAVE_vec_initv8sf (TARGET_AVX)
#define HAVE_vec_initv4df (TARGET_AVX)
#define HAVE_vec_initv16si (TARGET_AVX512F)
#define HAVE_vec_initv16sf (TARGET_AVX512F)
#define HAVE_vec_initv8di (TARGET_AVX512F)
#define HAVE_vec_initv8df (TARGET_AVX512F)
#define HAVE_vec_initv32hi (TARGET_AVX512F)
#define HAVE_vec_initv64qi (TARGET_AVX512F)
#define HAVE_vcvtps2ph_mask (TARGET_AVX512VL)
#define HAVE_vcvtps2ph (TARGET_F16C)
#define HAVE_avx2_gathersiv2di (TARGET_AVX2)
#define HAVE_avx2_gathersiv2df (TARGET_AVX2)
#define HAVE_avx2_gathersiv4di (TARGET_AVX2)
#define HAVE_avx2_gathersiv4df (TARGET_AVX2)
#define HAVE_avx2_gathersiv4si (TARGET_AVX2)
#define HAVE_avx2_gathersiv4sf (TARGET_AVX2)
#define HAVE_avx2_gathersiv8si (TARGET_AVX2)
#define HAVE_avx2_gathersiv8sf (TARGET_AVX2)
#define HAVE_avx2_gatherdiv2di (TARGET_AVX2)
#define HAVE_avx2_gatherdiv2df (TARGET_AVX2)
#define HAVE_avx2_gatherdiv4di (TARGET_AVX2)
#define HAVE_avx2_gatherdiv4df (TARGET_AVX2)
#define HAVE_avx2_gatherdiv4si (TARGET_AVX2)
#define HAVE_avx2_gatherdiv4sf (TARGET_AVX2)
#define HAVE_avx2_gatherdiv8si (TARGET_AVX2)
#define HAVE_avx2_gatherdiv8sf (TARGET_AVX2)
#define HAVE_avx512f_gathersiv16si (TARGET_AVX512F)
#define HAVE_avx512f_gathersiv16sf (TARGET_AVX512F)
#define HAVE_avx512f_gathersiv8di (TARGET_AVX512F)
#define HAVE_avx512f_gathersiv8df (TARGET_AVX512F)
#define HAVE_avx512vl_gathersiv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gathersiv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gathersiv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gathersiv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gathersiv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gathersiv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gathersiv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gathersiv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_gatherdiv16si (TARGET_AVX512F)
#define HAVE_avx512f_gatherdiv16sf (TARGET_AVX512F)
#define HAVE_avx512f_gatherdiv8di (TARGET_AVX512F)
#define HAVE_avx512f_gatherdiv8df (TARGET_AVX512F)
#define HAVE_avx512vl_gatherdiv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gatherdiv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gatherdiv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gatherdiv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gatherdiv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gatherdiv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gatherdiv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_gatherdiv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_scattersiv16si (TARGET_AVX512F)
#define HAVE_avx512f_scattersiv16sf (TARGET_AVX512F)
#define HAVE_avx512f_scattersiv8di (TARGET_AVX512F)
#define HAVE_avx512f_scattersiv8df (TARGET_AVX512F)
#define HAVE_avx512vl_scattersiv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scattersiv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scattersiv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scattersiv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scattersiv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scattersiv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scattersiv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scattersiv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512f_scatterdiv16si (TARGET_AVX512F)
#define HAVE_avx512f_scatterdiv16sf (TARGET_AVX512F)
#define HAVE_avx512f_scatterdiv8di (TARGET_AVX512F)
#define HAVE_avx512f_scatterdiv8df (TARGET_AVX512F)
#define HAVE_avx512vl_scatterdiv8si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scatterdiv8sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scatterdiv4di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scatterdiv4df ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scatterdiv4si ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scatterdiv4sf ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scatterdiv2di ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_scatterdiv2df ((TARGET_AVX512F) && (TARGET_AVX512VL))
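/* The gather/scatter pattern names encode the index width: the "si"
   forms (e.g. avx512f_gathersiv16sf) use SImode indices and the "di"
   forms DImode indices, independently of the element mode being
   loaded or stored.  Their operand lists (destination or source,
   base, index vector, mask, scale) are fixed by i386/sse.md, so the
   gen_* functions for them take correspondingly many arguments.  */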
#define HAVE_avx512f_expandv16si_maskz (TARGET_AVX512F)
#define HAVE_avx512f_expandv16sf_maskz (TARGET_AVX512F)
#define HAVE_avx512f_expandv8di_maskz (TARGET_AVX512F)
#define HAVE_avx512f_expandv8df_maskz (TARGET_AVX512F)
#define HAVE_avx512vl_expandv8si_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv8sf_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv4di_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv4df_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv4si_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv4sf_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv2di_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_avx512vl_expandv2df_maskz ((TARGET_AVX512F) && (TARGET_AVX512VL))
#define HAVE_vpamdd52huqv8di_maskz (TARGET_AVX512IFMA)
#define HAVE_vpamdd52huqv4di_maskz ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52huqv2di_maskz ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52luqv8di_maskz (TARGET_AVX512IFMA)
#define HAVE_vpamdd52luqv4di_maskz ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_vpamdd52luqv2di_maskz ((TARGET_AVX512IFMA) && (TARGET_AVX512VL))
#define HAVE_sse2_lfence (TARGET_SSE2)
#define HAVE_sse_sfence (TARGET_SSE || TARGET_3DNOW_A)
#define HAVE_sse2_mfence (TARGET_SSE2)
#define HAVE_mem_thread_fence 1
#define HAVE_atomic_loadqi 1
#define HAVE_atomic_loadhi 1
#define HAVE_atomic_loadsi 1
#define HAVE_atomic_loaddi (TARGET_64BIT || (TARGET_CMPXCHG8B && (TARGET_80387 || TARGET_SSE)))
#define HAVE_atomic_storeqi 1
#define HAVE_atomic_storehi 1
#define HAVE_atomic_storesi 1
#define HAVE_atomic_storedi (TARGET_64BIT || (TARGET_CMPXCHG8B && (TARGET_80387 || TARGET_SSE)))
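/* atomic_loaddi/atomic_storedi remain available on 32-bit targets
   only when a 64-bit memory access can be performed atomically:
   either through CMPXCHG8B or through a single 8-byte x87 or SSE
   move, which is exactly what the TARGET_CMPXCHG8B, TARGET_80387 and
   TARGET_SSE alternatives in the conditions above select.  */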
#define HAVE_atomic_compare_and_swapqi (TARGET_CMPXCHG)
#define HAVE_atomic_compare_and_swaphi (TARGET_CMPXCHG)
#define HAVE_atomic_compare_and_swapsi (TARGET_CMPXCHG)
#define HAVE_atomic_compare_and_swapdi ((TARGET_CMPXCHG) && (TARGET_64BIT || TARGET_CMPXCHG8B))
#define HAVE_atomic_compare_and_swapti ((TARGET_CMPXCHG) && (TARGET_64BIT && TARGET_CMPXCHG16B))
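/* From here on the header declares the generator functions that pair
   with the HAVE_* macros: each gen_<pattern> builds and returns the
   RTL for one instance of the named pattern, taking its operands in
   pattern order.  A generator's result is normally handed straight to
   the emit machinery, e.g.:

     emit_jump_insn (gen_jump (label));
*/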
extern rtx gen_x86_fnstsw_1 (rtx);
extern rtx gen_x86_sahf_1 (rtx);
extern rtx gen_kmovw (rtx, rtx);
extern rtx gen_insvhi_1 (rtx, rtx);
extern rtx gen_insvsi_1 (rtx, rtx);
extern rtx gen_insvdi_1 (rtx, rtx);
extern rtx gen_swapxf (rtx, rtx);
extern rtx gen_zero_extendqidi2 (rtx, rtx);
extern rtx gen_zero_extendhidi2 (rtx, rtx);
extern rtx gen_zero_extendqisi2_and (rtx, rtx);
extern rtx gen_zero_extendhisi2_and (rtx, rtx);
extern rtx gen_zero_extendqihi2_and (rtx, rtx);
extern rtx gen_extendsidi2_1 (rtx, rtx);
extern rtx gen_extendqidi2 (rtx, rtx);
extern rtx gen_extendhidi2 (rtx, rtx);
extern rtx gen_extendhisi2 (rtx, rtx);
extern rtx gen_extendqisi2 (rtx, rtx);
extern rtx gen_extendqihi2 (rtx, rtx);
extern rtx gen_truncxfsf2_i387_noop (rtx, rtx);
extern rtx gen_truncxfdf2_i387_noop (rtx, rtx);
extern rtx gen_fix_truncsfsi_sse (rtx, rtx);
extern rtx gen_fix_truncsfdi_sse (rtx, rtx);
extern rtx gen_fix_truncdfsi_sse (rtx, rtx);
extern rtx gen_fix_truncdfdi_sse (rtx, rtx);
extern rtx gen_fix_trunchi_fisttp_i387_1 (rtx, rtx);
extern rtx gen_fix_truncsi_fisttp_i387_1 (rtx, rtx);
extern rtx gen_fix_truncdi_fisttp_i387_1 (rtx, rtx);
extern rtx gen_fix_trunchi_i387_fisttp (rtx, rtx);
extern rtx gen_fix_truncsi_i387_fisttp (rtx, rtx);
extern rtx gen_fix_truncdi_i387_fisttp (rtx, rtx);
extern rtx gen_fix_trunchi_i387_fisttp_with_temp (rtx, rtx, rtx);
extern rtx gen_fix_truncsi_i387_fisttp_with_temp (rtx, rtx, rtx);
extern rtx gen_fix_truncdi_i387_fisttp_with_temp (rtx, rtx, rtx);
extern rtx gen_fix_truncdi_i387 (rtx, rtx, rtx, rtx);
extern rtx gen_fix_truncdi_i387_with_temp (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fix_trunchi_i387 (rtx, rtx, rtx, rtx);
extern rtx gen_fix_truncsi_i387 (rtx, rtx, rtx, rtx);
extern rtx gen_fix_trunchi_i387_with_temp (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fix_truncsi_i387_with_temp (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_x86_fnstcw_1 (rtx);
extern rtx gen_x86_fldcw_1 (rtx);
extern rtx gen_floathisf2 (rtx, rtx);
extern rtx gen_floathidf2 (rtx, rtx);
extern rtx gen_floathixf2 (rtx, rtx);
extern rtx gen_floatsixf2 (rtx, rtx);
extern rtx gen_floatdixf2 (rtx, rtx);
extern rtx gen_floatdisf2_i387_with_xmm (rtx, rtx, rtx);
extern rtx gen_floatdidf2_i387_with_xmm (rtx, rtx, rtx);
extern rtx gen_floatdixf2_i387_with_xmm (rtx, rtx, rtx);
extern rtx gen_addsi_1_zext (rtx, rtx, rtx);
extern rtx gen_addqi_ext_1 (rtx, rtx, rtx);
extern rtx gen_addqi3_carry (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addhi3_carry (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addsi3_carry (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_adddi3_carry (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addcarrysi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addcarrydi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subqi3_carry (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subhi3_carry (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subsi3_carry (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subdi3_carry (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subborrowsi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subborrowdi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_divmodsi4_1 (rtx, rtx, rtx, rtx);
extern rtx gen_divmoddi4_1 (rtx, rtx, rtx, rtx);
extern rtx gen_divmodhiqi3 (rtx, rtx, rtx);
extern rtx gen_udivmodsi4_1 (rtx, rtx, rtx, rtx);
extern rtx gen_udivmoddi4_1 (rtx, rtx, rtx, rtx);
extern rtx gen_udivmodhiqi3 (rtx, rtx, rtx);
extern rtx gen_kandnqi (rtx, rtx, rtx);
extern rtx gen_kandnhi (rtx, rtx, rtx);
extern rtx gen_andqi_ext_0 (rtx, rtx, rtx);
extern rtx gen_kxnorqi (rtx, rtx, rtx);
extern rtx gen_kxnorhi (rtx, rtx, rtx);
extern rtx gen_kxnorsi (rtx, rtx, rtx);
extern rtx gen_kxnordi (rtx, rtx, rtx);
extern rtx gen_kortestzhi (rtx, rtx);
extern rtx gen_kortestchi (rtx, rtx);
extern rtx gen_kunpckhi (rtx, rtx, rtx);
extern rtx gen_kunpcksi (rtx, rtx, rtx);
extern rtx gen_kunpckdi (rtx, rtx, rtx);
extern rtx gen_copysignsf3_const (rtx, rtx, rtx, rtx);
extern rtx gen_copysigndf3_const (rtx, rtx, rtx, rtx);
extern rtx gen_copysigntf3_const (rtx, rtx, rtx, rtx);
extern rtx gen_copysignsf3_var (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_copysigndf3_var (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_copysigntf3_var (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_x86_64_shld (rtx, rtx, rtx);
extern rtx gen_x86_shld (rtx, rtx, rtx);
extern rtx gen_x86_64_shrd (rtx, rtx, rtx);
extern rtx gen_x86_shrd (rtx, rtx, rtx);
extern rtx gen_ashrdi3_cvt (rtx, rtx, rtx);
extern rtx gen_ashrsi3_cvt (rtx, rtx, rtx);
extern rtx gen_ix86_rotldi3_doubleword (rtx, rtx, rtx);
extern rtx gen_ix86_rotlti3_doubleword (rtx, rtx, rtx);
extern rtx gen_ix86_rotrdi3_doubleword (rtx, rtx, rtx);
extern rtx gen_ix86_rotrti3_doubleword (rtx, rtx, rtx);
extern rtx gen_setcc_sf_sse (rtx, rtx, rtx, rtx);
extern rtx gen_setcc_df_sse (rtx, rtx, rtx, rtx);
extern rtx gen_jump (rtx);
extern rtx gen_blockage (void);
extern rtx gen_prologue_use (rtx);
extern rtx gen_simple_return_internal (void);
extern rtx gen_simple_return_internal_long (void);
extern rtx gen_simple_return_pop_internal (rtx);
extern rtx gen_simple_return_indirect_internal (rtx);
extern rtx gen_nop (void);
extern rtx gen_nops (rtx);
extern rtx gen_pad (rtx);
extern rtx gen_set_got_rex64 (rtx);
extern rtx gen_set_rip_rex64 (rtx, rtx);
extern rtx gen_set_got_offset_rex64 (rtx, rtx);
extern rtx gen_eh_return_internal (void);
extern rtx gen_leave (void);
extern rtx gen_leave_rex64 (void);
extern rtx gen_split_stack_return (rtx);
extern rtx gen_ffssi2_no_cmove (rtx, rtx);
extern rtx gen_bmi_bextr_si (rtx, rtx, rtx);
extern rtx gen_bmi_bextr_di (rtx, rtx, rtx);
extern rtx gen_bmi2_pdep_si3 (rtx, rtx, rtx);
extern rtx gen_bmi2_pdep_di3 (rtx, rtx, rtx);
extern rtx gen_bmi2_pext_si3 (rtx, rtx, rtx);
extern rtx gen_bmi2_pext_di3 (rtx, rtx, rtx);
extern rtx gen_tbm_bextri_si (rtx, rtx, rtx, rtx);
extern rtx gen_tbm_bextri_di (rtx, rtx, rtx, rtx);
extern rtx gen_bsr_rex64 (rtx, rtx);
extern rtx gen_bsr (rtx, rtx);
extern rtx gen_bswaphi_lowpart (rtx);
extern rtx gen_paritydi2_cmp (rtx, rtx, rtx, rtx);
extern rtx gen_paritysi2_cmp (rtx, rtx, rtx);
static inline rtx gen_tls_initial_exec_64_sun (rtx, rtx);
static inline rtx
gen_tls_initial_exec_64_sun(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
{
return 0;
}
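/* A pattern whose condition can never hold for this configuration is
   given a static inline stub instead of an extern declaration, as for
   gen_tls_initial_exec_64_sun above: the stub returns 0 so callers
   can test the result without referencing a generator that is never
   compiled.  */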
extern rtx gen_truncxfsf2_i387_noop_unspec (rtx, rtx);
extern rtx gen_truncxfdf2_i387_noop_unspec (rtx, rtx);
extern rtx gen_sqrtxf2 (rtx, rtx);
extern rtx gen_sqrt_extendsfxf2_i387 (rtx, rtx);
extern rtx gen_sqrt_extenddfxf2_i387 (rtx, rtx);
extern rtx gen_fpremxf4_i387 (rtx, rtx, rtx, rtx);
extern rtx gen_fprem1xf4_i387 (rtx, rtx, rtx, rtx);
extern rtx gen_sincosxf3 (rtx, rtx, rtx);
extern rtx gen_sincos_extendsfxf3_i387 (rtx, rtx, rtx);
extern rtx gen_sincos_extenddfxf3_i387 (rtx, rtx, rtx);
extern rtx gen_fptanxf4_i387 (rtx, rtx, rtx, rtx);
extern rtx gen_fptan_extendsfxf4_i387 (rtx, rtx, rtx, rtx);
extern rtx gen_fptan_extenddfxf4_i387 (rtx, rtx, rtx, rtx);
extern rtx gen_fpatan_extendsfxf3_i387 (rtx, rtx, rtx);
extern rtx gen_fpatan_extenddfxf3_i387 (rtx, rtx, rtx);
extern rtx gen_fyl2xxf3_i387 (rtx, rtx, rtx);
extern rtx gen_fyl2x_extendsfxf3_i387 (rtx, rtx, rtx);
extern rtx gen_fyl2x_extenddfxf3_i387 (rtx, rtx, rtx);
extern rtx gen_fyl2xp1xf3_i387 (rtx, rtx, rtx);
extern rtx gen_fyl2xp1_extendsfxf3_i387 (rtx, rtx, rtx);
extern rtx gen_fyl2xp1_extenddfxf3_i387 (rtx, rtx, rtx);
extern rtx gen_fxtractxf3_i387 (rtx, rtx, rtx);
extern rtx gen_fxtract_extendsfxf3_i387 (rtx, rtx, rtx);
extern rtx gen_fxtract_extenddfxf3_i387 (rtx, rtx, rtx);
extern rtx gen_fscalexf4_i387 (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_roundsf2 (rtx, rtx, rtx);
extern rtx gen_sse4_1_rounddf2 (rtx, rtx, rtx);
extern rtx gen_rintxf2 (rtx, rtx);
extern rtx gen_fistdi2 (rtx, rtx);
extern rtx gen_fistdi2_with_temp (rtx, rtx, rtx);
extern rtx gen_fisthi2 (rtx, rtx);
extern rtx gen_fistsi2 (rtx, rtx);
extern rtx gen_fisthi2_with_temp (rtx, rtx, rtx);
extern rtx gen_fistsi2_with_temp (rtx, rtx, rtx);
extern rtx gen_frndintxf2_floor (rtx, rtx);
extern rtx gen_frndintxf2_ceil (rtx, rtx);
extern rtx gen_frndintxf2_trunc (rtx, rtx);
extern rtx gen_frndintxf2_floor_i387 (rtx, rtx, rtx, rtx);
extern rtx gen_frndintxf2_ceil_i387 (rtx, rtx, rtx, rtx);
extern rtx gen_frndintxf2_trunc_i387 (rtx, rtx, rtx, rtx);
extern rtx gen_frndintxf2_mask_pm (rtx, rtx);
extern rtx gen_frndintxf2_mask_pm_i387 (rtx, rtx, rtx, rtx);
extern rtx gen_fistdi2_floor (rtx, rtx, rtx, rtx);
extern rtx gen_fistdi2_ceil (rtx, rtx, rtx, rtx);
extern rtx gen_fistdi2_floor_with_temp (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fistdi2_ceil_with_temp (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fisthi2_floor (rtx, rtx, rtx, rtx);
extern rtx gen_fisthi2_ceil (rtx, rtx, rtx, rtx);
extern rtx gen_fistsi2_floor (rtx, rtx, rtx, rtx);
extern rtx gen_fistsi2_ceil (rtx, rtx, rtx, rtx);
extern rtx gen_fisthi2_floor_with_temp (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fisthi2_ceil_with_temp (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fistsi2_floor_with_temp (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fistsi2_ceil_with_temp (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fxamsf2_i387 (rtx, rtx);
extern rtx gen_fxamdf2_i387 (rtx, rtx);
extern rtx gen_fxamxf2_i387 (rtx, rtx);
extern rtx gen_fxamsf2_i387_with_temp (rtx, rtx);
extern rtx gen_fxamdf2_i387_with_temp (rtx, rtx);
extern rtx gen_movmsk_df (rtx, rtx);
extern rtx gen_cld (void);
extern rtx gen_smaxsf3 (rtx, rtx, rtx);
extern rtx gen_sminsf3 (rtx, rtx, rtx);
extern rtx gen_smaxdf3 (rtx, rtx, rtx);
extern rtx gen_smindf3 (rtx, rtx, rtx);
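/* Stack management: prologue/epilogue stack-pointer adjustment, the
   probing sequences used by -fstack-check, and the -fstack-protector
   guard set/test patterns (plain and TLS-based).  */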
extern rtx gen_pro_epilogue_adjust_stack_si_add (rtx, rtx, rtx);
extern rtx gen_pro_epilogue_adjust_stack_di_add (rtx, rtx, rtx);
extern rtx gen_pro_epilogue_adjust_stack_si_sub (rtx, rtx, rtx);
extern rtx gen_pro_epilogue_adjust_stack_di_sub (rtx, rtx, rtx);
extern rtx gen_allocate_stack_worker_probe_si (rtx, rtx);
extern rtx gen_allocate_stack_worker_probe_di (rtx, rtx);
extern rtx gen_adjust_stack_and_probesi (rtx, rtx, rtx);
extern rtx gen_adjust_stack_and_probedi (rtx, rtx, rtx);
extern rtx gen_probe_stack_rangesi (rtx, rtx, rtx);
extern rtx gen_probe_stack_rangedi (rtx, rtx, rtx);
extern rtx gen_trap (void);
extern rtx gen_stack_protect_set_si (rtx, rtx);
extern rtx gen_stack_protect_set_di (rtx, rtx);
extern rtx gen_stack_tls_protect_set_si (rtx, rtx);
extern rtx gen_stack_tls_protect_set_di (rtx, rtx);
extern rtx gen_stack_protect_test_si (rtx, rtx, rtx);
extern rtx gen_stack_protect_test_di (rtx, rtx, rtx);
extern rtx gen_stack_tls_protect_test_si (rtx, rtx, rtx);
extern rtx gen_stack_tls_protect_test_di (rtx, rtx, rtx);
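/* System and state-management instructions: SSE4.2 crc32,
   rdpmc/rdtsc/rdtscp, fxsave/fxrstor, the xsave/xsaveopt/xsavec/xsaves
   and xrstor/xrstors families, x87 environment save/restore, LWP slwpcb,
   fs/gs segment-base reads and writes, rdrand/rdseed, RTM
   (xbegin/xend/xabort/xtest), clwb/clflushopt, mwaitx/monitorx, and
   clzero.  */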
extern rtx gen_sse4_2_crc32qi (rtx, rtx, rtx);
extern rtx gen_sse4_2_crc32hi (rtx, rtx, rtx);
extern rtx gen_sse4_2_crc32si (rtx, rtx, rtx);
extern rtx gen_sse4_2_crc32di (rtx, rtx, rtx);
extern rtx gen_rdpmc (rtx, rtx);
extern rtx gen_rdpmc_rex64 (rtx, rtx, rtx);
extern rtx gen_rdtsc (rtx);
extern rtx gen_rdtsc_rex64 (rtx, rtx);
extern rtx gen_rdtscp (rtx, rtx);
extern rtx gen_rdtscp_rex64 (rtx, rtx, rtx);
extern rtx gen_fxsave (rtx);
extern rtx gen_fxsave64 (rtx);
extern rtx gen_fxrstor (rtx);
extern rtx gen_fxrstor64 (rtx);
extern rtx gen_xsave (rtx, rtx);
extern rtx gen_xsaveopt (rtx, rtx);
extern rtx gen_xsavec (rtx, rtx);
extern rtx gen_xsaves (rtx, rtx);
extern rtx gen_xsave_rex64 (rtx, rtx, rtx);
extern rtx gen_xsaveopt_rex64 (rtx, rtx, rtx);
extern rtx gen_xsavec_rex64 (rtx, rtx, rtx);
extern rtx gen_xsaves_rex64 (rtx, rtx, rtx);
extern rtx gen_xsave64 (rtx, rtx, rtx);
extern rtx gen_xsaveopt64 (rtx, rtx, rtx);
extern rtx gen_xsavec64 (rtx, rtx, rtx);
extern rtx gen_xsaves64 (rtx, rtx, rtx);
extern rtx gen_xrstor (rtx, rtx);
extern rtx gen_xrstors (rtx, rtx);
extern rtx gen_xrstor_rex64 (rtx, rtx, rtx);
extern rtx gen_xrstors_rex64 (rtx, rtx, rtx);
extern rtx gen_xrstor64 (rtx, rtx, rtx);
extern rtx gen_xrstors64 (rtx, rtx, rtx);
extern rtx gen_fnstenv (rtx);
extern rtx gen_fldenv (rtx);
extern rtx gen_fnstsw (rtx);
extern rtx gen_fnclex (void);
extern rtx gen_lwp_slwpcbsi (rtx);
extern rtx gen_lwp_slwpcbdi (rtx);
extern rtx gen_rdfsbasesi (rtx);
extern rtx gen_rdgsbasesi (rtx);
extern rtx gen_rdfsbasedi (rtx);
extern rtx gen_rdgsbasedi (rtx);
extern rtx gen_wrfsbasesi (rtx);
extern rtx gen_wrgsbasesi (rtx);
extern rtx gen_wrfsbasedi (rtx);
extern rtx gen_wrgsbasedi (rtx);
extern rtx gen_rdrandhi_1 (rtx);
extern rtx gen_rdrandsi_1 (rtx);
extern rtx gen_rdranddi_1 (rtx);
extern rtx gen_rdseedhi_1 (rtx);
extern rtx gen_rdseedsi_1 (rtx);
extern rtx gen_rdseeddi_1 (rtx);
extern rtx gen_xbegin_1 (rtx, rtx);
extern rtx gen_xend (void);
extern rtx gen_xabort (rtx);
extern rtx gen_xtest_1 (void);
extern rtx gen_clwb (rtx);
extern rtx gen_clflushopt (rtx);
extern rtx gen_mwaitx (rtx, rtx, rtx);
extern rtx gen_monitorx_si (rtx, rtx, rtx);
extern rtx gen_monitorx_di (rtx, rtx, rtx);
extern rtx gen_clzero_si (rtx);
extern rtx gen_clzero_di (rtx);
extern rtx gen_move_size_reloc_si (rtx, rtx);
extern rtx gen_move_size_reloc_di (rtx, rtx);
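/* MMX and 3DNow! generators: movntq, 3DNow! V2SF arithmetic (pfmax/pfmin,
   pfrcp and pfrsqrt with their pfrcpit/pfrsqit refinement steps,
   horizontal add/sub and addsub forms, pf2id/pf2iw/pi2fw conversions,
   pswapd), MMX shifts, compares, andnot, pack/unpack, and the
   SSE-extension pextrw/pshufw/psadbw/pmovmskb patterns.  */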
extern rtx gen_sse_movntq (rtx, rtx);
extern rtx gen_mmx_ieee_maxv2sf3 (rtx, rtx, rtx);
extern rtx gen_mmx_ieee_minv2sf3 (rtx, rtx, rtx);
extern rtx gen_mmx_rcpv2sf2 (rtx, rtx);
extern rtx gen_mmx_rcpit1v2sf3 (rtx, rtx, rtx);
extern rtx gen_mmx_rcpit2v2sf3 (rtx, rtx, rtx);
extern rtx gen_mmx_rsqrtv2sf2 (rtx, rtx);
extern rtx gen_mmx_rsqit1v2sf3 (rtx, rtx, rtx);
extern rtx gen_mmx_haddv2sf3 (rtx, rtx, rtx);
extern rtx gen_mmx_hsubv2sf3 (rtx, rtx, rtx);
extern rtx gen_mmx_addsubv2sf3 (rtx, rtx, rtx);
extern rtx gen_mmx_gtv2sf3 (rtx, rtx, rtx);
extern rtx gen_mmx_gev2sf3 (rtx, rtx, rtx);
extern rtx gen_mmx_pf2id (rtx, rtx);
extern rtx gen_mmx_pf2iw (rtx, rtx);
extern rtx gen_mmx_pi2fw (rtx, rtx);
extern rtx gen_mmx_floatv2si2 (rtx, rtx);
extern rtx gen_mmx_pswapdv2sf2 (rtx, rtx);
extern rtx gen_mmx_ashrv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_ashrv2si3 (rtx, rtx, rtx);
extern rtx gen_mmx_ashlv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_lshrv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_ashlv2si3 (rtx, rtx, rtx);
extern rtx gen_mmx_lshrv2si3 (rtx, rtx, rtx);
extern rtx gen_mmx_ashlv1di3 (rtx, rtx, rtx);
extern rtx gen_mmx_lshrv1di3 (rtx, rtx, rtx);
extern rtx gen_mmx_gtv8qi3 (rtx, rtx, rtx);
extern rtx gen_mmx_gtv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_gtv2si3 (rtx, rtx, rtx);
extern rtx gen_mmx_andnotv8qi3 (rtx, rtx, rtx);
extern rtx gen_mmx_andnotv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_andnotv2si3 (rtx, rtx, rtx);
extern rtx gen_mmx_packsswb (rtx, rtx, rtx);
extern rtx gen_mmx_packssdw (rtx, rtx, rtx);
extern rtx gen_mmx_packuswb (rtx, rtx, rtx);
extern rtx gen_mmx_punpckhbw (rtx, rtx, rtx);
extern rtx gen_mmx_punpcklbw (rtx, rtx, rtx);
extern rtx gen_mmx_punpckhwd (rtx, rtx, rtx);
extern rtx gen_mmx_punpcklwd (rtx, rtx, rtx);
extern rtx gen_mmx_punpckhdq (rtx, rtx, rtx);
extern rtx gen_mmx_punpckldq (rtx, rtx, rtx);
extern rtx gen_mmx_pextrw (rtx, rtx, rtx);
extern rtx gen_mmx_pshufw_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mmx_pswapdv2si2 (rtx, rtx);
extern rtx gen_mmx_psadbw (rtx, rtx, rtx);
extern rtx gen_mmx_pmovmskb (rtx, rtx);
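/* AVX-512 masked vector moves: masked loads, blendm (mask-merging)
   moves, and masked stores, generated per element type at 512-bit
   (avx512f/avx512bw) and 128/256-bit (avx512vl) widths.  */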
extern rtx gen_avx512f_loadv16si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_loadv8si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_loadv4si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_loadv8di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_loadv4di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_loadv2di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_loadv16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_loadv8sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_loadv4sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_loadv8df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_loadv4df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_loadv2df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_loadv64qi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_loadv16qi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_loadv32qi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_loadv32hi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_loadv16hi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_loadv8hi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_blendmv16si (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_blendmv8si (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_blendmv4si (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_blendmv8di (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_blendmv4di (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_blendmv2di (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_blendmv16sf (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_blendmv8sf (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_blendmv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_blendmv8df (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_blendmv4df (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_blendmv2df (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_blendmv64qi (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_blendmv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_blendmv32qi (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_blendmv32hi (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_blendmv16hi (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_blendmv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_storev16si_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storev8si_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storev4si_mask (rtx, rtx, rtx);
extern rtx gen_avx512f_storev8di_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storev4di_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storev2di_mask (rtx, rtx, rtx);
extern rtx gen_avx512f_storev16sf_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storev8sf_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storev4sf_mask (rtx, rtx, rtx);
extern rtx gen_avx512f_storev8df_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storev4df_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storev2df_mask (rtx, rtx, rtx);
extern rtx gen_avx512bw_storev64qi_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storev16qi_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storev32qi_mask (rtx, rtx, rtx);
extern rtx gen_avx512bw_storev32hi_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storev16hi_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storev8hi_mask (rtx, rtx, rtx);
extern rtx gen_sse2_movq128 (rtx, rtx);
extern rtx gen_movdi_to_sse (rtx, rtx);
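/* Unaligned and streaming vector memory generators: movups/movupd and
   movdqu stores (with AVX-512 masked forms), lddqu, and the non-temporal
   movnti/movntps/movntpd/movntdq stores.  */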
extern rtx gen_avx512f_storeups512 (rtx, rtx);
extern rtx gen_avx_storeups256 (rtx, rtx);
extern rtx gen_sse_storeups (rtx, rtx);
extern rtx gen_avx512f_storeupd512 (rtx, rtx);
extern rtx gen_avx_storeupd256 (rtx, rtx);
extern rtx gen_sse2_storeupd (rtx, rtx);
extern rtx gen_avx512f_storeups512_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storeups256_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storeups_mask (rtx, rtx, rtx);
extern rtx gen_avx512f_storeupd512_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storeupd256_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storeupd_mask (rtx, rtx, rtx);
extern rtx gen_avx_storedquv32qi (rtx, rtx);
extern rtx gen_sse2_storedquv16qi (rtx, rtx);
extern rtx gen_avx512f_storedquv64qi (rtx, rtx);
extern rtx gen_avx512bw_storedquv32hi (rtx, rtx);
extern rtx gen_avx512vl_storedquv8hi (rtx, rtx);
extern rtx gen_avx512vl_storedquv16hi (rtx, rtx);
extern rtx gen_avx512f_storedquv16si (rtx, rtx);
extern rtx gen_avx_storedquv8si (rtx, rtx);
extern rtx gen_sse2_storedquv4si (rtx, rtx);
extern rtx gen_avx512f_storedquv8di (rtx, rtx);
extern rtx gen_avx512vl_storedquv4di (rtx, rtx);
extern rtx gen_avx512vl_storedquv2di (rtx, rtx);
extern rtx gen_avx512f_storedquv16si_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storedquv8si_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storedquv4si_mask (rtx, rtx, rtx);
extern rtx gen_avx512f_storedquv8di_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storedquv4di_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storedquv2di_mask (rtx, rtx, rtx);
extern rtx gen_avx512bw_storedquv64qi_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storedquv16qi_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storedquv32qi_mask (rtx, rtx, rtx);
extern rtx gen_avx512bw_storedquv32hi_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storedquv16hi_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_storedquv8hi_mask (rtx, rtx, rtx);
extern rtx gen_avx_lddqu256 (rtx, rtx);
extern rtx gen_sse3_lddqu (rtx, rtx);
extern rtx gen_sse2_movntisi (rtx, rtx);
extern rtx gen_sse2_movntidi (rtx, rtx);
extern rtx gen_avx512f_movntv16sf (rtx, rtx);
extern rtx gen_avx_movntv8sf (rtx, rtx);
extern rtx gen_sse_movntv4sf (rtx, rtx);
extern rtx gen_avx512f_movntv8df (rtx, rtx);
extern rtx gen_avx_movntv4df (rtx, rtx);
extern rtx gen_sse2_movntv2df (rtx, rtx);
extern rtx gen_avx512f_movntv8di (rtx, rtx);
extern rtx gen_avx_movntv4di (rtx, rtx);
extern rtx gen_sse2_movntv2di (rtx, rtx);
extern rtx gen_sse_vmaddv4sf3 (rtx, rtx, rtx);
extern rtx gen_sse_vmaddv4sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse_vmsubv4sf3 (rtx, rtx, rtx);
extern rtx gen_sse_vmsubv4sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_vmaddv2df3 (rtx, rtx, rtx);
extern rtx gen_sse2_vmaddv2df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_vmsubv2df3 (rtx, rtx, rtx);
extern rtx gen_sse2_vmsubv2df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse_vmmulv4sf3 (rtx, rtx, rtx);
extern rtx gen_sse_vmmulv4sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse_vmdivv4sf3 (rtx, rtx, rtx);
extern rtx gen_sse_vmdivv4sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_vmmulv2df3 (rtx, rtx, rtx);
extern rtx gen_sse2_vmmulv2df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_vmdivv2df3 (rtx, rtx, rtx);
extern rtx gen_sse2_vmdivv2df3_round (rtx, rtx, rtx, rtx);
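/* Naming convention for the AVX-512-era arithmetic generators below:
   _mask variants take an extra merge source and write-mask operand,
   _maskz_1 variants zero the masked-off lanes, and _round variants add
   an embedded rounding-mode operand.  Embedded rounding is only
   encodable for 512-bit and scalar EVEX operations, which is presumably
   why the 128/256-bit _round combinations are emitted as stubs
   returning 0.  */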
extern rtx gen_avx512f_divv16sf3 (rtx, rtx, rtx);
extern rtx gen_avx512f_divv16sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_divv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_divv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_divv8sf3 (rtx, rtx, rtx);
static inline rtx gen_avx_divv8sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_avx_divv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_avx_divv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx_divv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx_divv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_sse_divv4sf3 (rtx, rtx, rtx);
static inline rtx gen_sse_divv4sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_sse_divv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_sse_divv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_sse_divv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_sse_divv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512f_divv8df3 (rtx, rtx, rtx);
extern rtx gen_avx512f_divv8df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_divv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_divv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_divv4df3 (rtx, rtx, rtx);
static inline rtx gen_avx_divv4df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_avx_divv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_avx_divv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx_divv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx_divv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_sse2_divv2df3 (rtx, rtx, rtx);
static inline rtx gen_sse2_divv2df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_sse2_divv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_sse2_divv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_sse2_divv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_sse2_divv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx_rcpv8sf2 (rtx, rtx);
extern rtx gen_sse_rcpv4sf2 (rtx, rtx);
extern rtx gen_sse_vmrcpv4sf2 (rtx, rtx, rtx);
extern rtx gen_rcp14v16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rcp14v8sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rcp14v4sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rcp14v8df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rcp14v4df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rcp14v2df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_srcp14v4sf (rtx, rtx, rtx);
extern rtx gen_srcp14v2df (rtx, rtx, rtx);
extern rtx gen_avx512f_sqrtv16sf2 (rtx, rtx);
extern rtx gen_avx512f_sqrtv16sf2_round (rtx, rtx, rtx);
extern rtx gen_avx512f_sqrtv16sf2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sqrtv16sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_sqrtv8sf2 (rtx, rtx);
static inline rtx gen_avx_sqrtv8sf2_round (rtx, rtx, rtx);
static inline rtx
gen_avx_sqrtv8sf2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_avx_sqrtv8sf2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_avx_sqrtv8sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx_sqrtv8sf2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_sse_sqrtv4sf2 (rtx, rtx);
static inline rtx gen_sse_sqrtv4sf2_round (rtx, rtx, rtx);
static inline rtx
gen_sse_sqrtv4sf2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_sse_sqrtv4sf2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_sse_sqrtv4sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_sse_sqrtv4sf2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_avx512f_sqrtv8df2 (rtx, rtx);
extern rtx gen_avx512f_sqrtv8df2_round (rtx, rtx, rtx);
extern rtx gen_avx512f_sqrtv8df2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sqrtv8df2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_sqrtv4df2 (rtx, rtx);
static inline rtx gen_avx_sqrtv4df2_round (rtx, rtx, rtx);
static inline rtx
gen_avx_sqrtv4df2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_avx_sqrtv4df2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_avx_sqrtv4df2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx_sqrtv4df2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_sse2_sqrtv2df2 (rtx, rtx);
static inline rtx gen_sse2_sqrtv2df2_round (rtx, rtx, rtx);
static inline rtx
gen_sse2_sqrtv2df2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_sse2_sqrtv2df2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_sse2_sqrtv2df2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_sse2_sqrtv2df2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_sse_vmsqrtv4sf2 (rtx, rtx, rtx);
extern rtx gen_sse_vmsqrtv4sf2_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_vmsqrtv2df2 (rtx, rtx, rtx);
extern rtx gen_sse2_vmsqrtv2df2_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx_rsqrtv8sf2 (rtx, rtx);
extern rtx gen_sse_rsqrtv4sf2 (rtx, rtx);
extern rtx gen_rsqrt14v16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rsqrt14v8sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rsqrt14v4sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rsqrt14v8df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rsqrt14v4df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rsqrt14v2df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_rsqrt14v4sf (rtx, rtx, rtx);
extern rtx gen_rsqrt14v2df (rtx, rtx, rtx);
extern rtx gen_sse_vmrsqrtv4sf2 (rtx, rtx, rtx);
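/* IEEE-style vector max/min.  Like the underlying maxps/minps family,
   these are not commutative: if an operand is a NaN the second source
   operand is returned, so they are kept separate from the commutative
   smax/smin RTL patterns.  */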
extern rtx gen_ieee_maxv16sf3 (rtx, rtx, rtx);
extern rtx gen_ieee_maxv16sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_ieee_maxv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ieee_maxv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ieee_minv16sf3 (rtx, rtx, rtx);
extern rtx gen_ieee_minv16sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_ieee_minv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ieee_minv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ieee_maxv8sf3 (rtx, rtx, rtx);
static inline rtx gen_ieee_maxv8sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_maxv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_ieee_maxv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_ieee_maxv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_maxv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_ieee_minv8sf3 (rtx, rtx, rtx);
static inline rtx gen_ieee_minv8sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_minv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_ieee_minv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_ieee_minv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_minv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_ieee_maxv4sf3 (rtx, rtx, rtx);
static inline rtx gen_ieee_maxv4sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_maxv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_ieee_maxv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_ieee_maxv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_maxv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_ieee_minv4sf3 (rtx, rtx, rtx);
static inline rtx gen_ieee_minv4sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_minv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_ieee_minv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_ieee_minv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_minv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_ieee_maxv8df3 (rtx, rtx, rtx);
extern rtx gen_ieee_maxv8df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_ieee_maxv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ieee_maxv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ieee_minv8df3 (rtx, rtx, rtx);
extern rtx gen_ieee_minv8df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_ieee_minv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ieee_minv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ieee_maxv4df3 (rtx, rtx, rtx);
static inline rtx gen_ieee_maxv4df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_maxv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_ieee_maxv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_ieee_maxv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_maxv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_ieee_minv4df3 (rtx, rtx, rtx);
static inline rtx gen_ieee_minv4df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_minv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_ieee_minv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_ieee_minv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_minv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_ieee_maxv2df3 (rtx, rtx, rtx);
static inline rtx gen_ieee_maxv2df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_maxv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_ieee_maxv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_ieee_maxv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_maxv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_ieee_minv2df3 (rtx, rtx, rtx);
static inline rtx gen_ieee_minv2df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_minv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_ieee_minv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_ieee_minv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ieee_minv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_sse_vmsmaxv4sf3 (rtx, rtx, rtx);
extern rtx gen_sse_vmsmaxv4sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse_vmsminv4sf3 (rtx, rtx, rtx);
extern rtx gen_sse_vmsminv4sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_vmsmaxv2df3 (rtx, rtx, rtx);
extern rtx gen_sse2_vmsmaxv2df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_vmsminv2df3 (rtx, rtx, rtx);
extern rtx gen_sse2_vmsminv2df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx_addsubv4df3 (rtx, rtx, rtx);
extern rtx gen_sse3_addsubv2df3 (rtx, rtx, rtx);
extern rtx gen_avx_addsubv8sf3 (rtx, rtx, rtx);
extern rtx gen_sse3_addsubv4sf3 (rtx, rtx, rtx);
extern rtx gen_avx_haddv4df3 (rtx, rtx, rtx);
extern rtx gen_avx_hsubv4df3 (rtx, rtx, rtx);
extern rtx gen_sse3_hsubv2df3 (rtx, rtx, rtx);
extern rtx gen_avx_haddv8sf3 (rtx, rtx, rtx);
extern rtx gen_avx_hsubv8sf3 (rtx, rtx, rtx);
extern rtx gen_sse3_haddv4sf3 (rtx, rtx, rtx);
extern rtx gen_sse3_hsubv4sf3 (rtx, rtx, rtx);
extern rtx gen_reducepv16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_reducepv8sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_reducepv4sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_reducepv8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_reducepv4df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_reducepv2df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_reducesv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_reducesv2df (rtx, rtx, rtx, rtx);
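/* Vector compare generators.  The avx_cmp patterns take the
   vcmpps/vcmppd predicate immediate; the avx512 cmp (signed) and ucmp
   (unsigned) integer compares write a mask register, with _mask variants
   that AND a write mask into the result.  The avx512vl _round compare
   variants are stubs, since SAE/embedded-rounding forms exist only at
   512 bits.  */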
extern rtx gen_avx_cmpv8sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_cmpv4sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_cmpv4df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_cmpv2df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_vmcmpv4sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_vmcmpv2df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_maskcmpv8sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_sse_maskcmpv4sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_maskcmpv4df3 (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_maskcmpv2df3 (rtx, rtx, rtx, rtx);
extern rtx gen_sse_vmmaskcmpv4sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_vmmaskcmpv2df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv16si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv16si3_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv16si3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv8si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv8si3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_cmpv8si3_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv8si3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_avx512vl_cmpv8si3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv8si3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512vl_cmpv4si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv4si3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_cmpv4si3_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv4si3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_avx512vl_cmpv4si3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv4si3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512f_cmpv8di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv8di3_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv8di3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv4di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv4di3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_cmpv4di3_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv4di3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_avx512vl_cmpv4di3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv4di3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512vl_cmpv2di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv2di3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_cmpv2di3_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv2di3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_avx512vl_cmpv2di3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv2di3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512f_cmpv16sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv16sf3_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv8sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_cmpv8sf3_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_avx512vl_cmpv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512vl_cmpv4sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_cmpv4sf3_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_avx512vl_cmpv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512f_cmpv8df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv8df3_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cmpv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv4df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_cmpv4df3_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_avx512vl_cmpv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512vl_cmpv2df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_cmpv2df3_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
static inline rtx gen_avx512vl_cmpv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_cmpv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512bw_cmpv64qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_cmpv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv16qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv32qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_cmpv32hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_cmpv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv16hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv8hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_cmpv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_ucmpv64qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_ucmpv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv16qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv32qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_ucmpv32hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_ucmpv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv16hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv8hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ucmpv16si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ucmpv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv8si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv4si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ucmpv8di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ucmpv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv4di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv2di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ucmpv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmcmpv4sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmcmpv4sf3_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmcmpv2df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmcmpv2df3_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmcmpv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmcmpv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmcmpv2df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmcmpv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_maskcmpv16sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_maskcmpv8sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_maskcmpv4sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_maskcmpv8df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_maskcmpv4df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_maskcmpv2df3 (rtx, rtx, rtx, rtx);
extern rtx gen_sse_comi (rtx, rtx);
extern rtx gen_sse_comi_round (rtx, rtx, rtx);
extern rtx gen_sse2_comi (rtx, rtx);
extern rtx gen_sse2_comi_round (rtx, rtx, rtx);
extern rtx gen_sse_ucomi (rtx, rtx);
extern rtx gen_sse_ucomi_round (rtx, rtx, rtx);
extern rtx gen_sse2_ucomi (rtx, rtx);
extern rtx gen_sse2_ucomi_round (rtx, rtx, rtx);
extern rtx gen_avx_andnotv8sf3 (rtx, rtx, rtx);
extern rtx gen_avx_andnotv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse_andnotv4sf3 (rtx, rtx, rtx);
extern rtx gen_sse_andnotv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_andnotv4df3 (rtx, rtx, rtx);
extern rtx gen_avx_andnotv4df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_andnotv2df3 (rtx, rtx, rtx);
extern rtx gen_sse2_andnotv2df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_andnotv16sf3 (rtx, rtx, rtx);
extern rtx gen_avx512f_andnotv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_andnotv8df3 (rtx, rtx, rtx);
extern rtx gen_avx512f_andnotv8df3_mask (rtx, rtx, rtx, rtx, rtx);
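/* Fused multiply-add family: fmadd (a * b + c), fmsub (a * b - c),
   fnmadd (-a * b + c), fnmsub (-a * b - c), and the element-alternating
   fmaddsub/fmsubadd forms.  For masking, _maskz_1 zeroes masked lanes,
   _mask merges into the first (multiplicand) operand, and _mask3 merges
   into the third (addend) operand, mirroring the vfmadd132/213/231
   operand orderings.  */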
extern rtx gen_fma_fmadd_v16sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmadd_v16sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmadd_v8sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmadd_v8sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmadd_v8sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fmadd_v4sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmadd_v4sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmadd_v4sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fmadd_v8df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmadd_v8df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmadd_v4df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmadd_v4df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmadd_v4df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fmadd_v2df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmadd_v2df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmadd_v2df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_avx512f_fmadd_v16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v8sf_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fmadd_v8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fmadd_v8sf_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512vl_fmadd_v4sf_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fmadd_v4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fmadd_v4sf_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512f_fmadd_v8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4df_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fmadd_v4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fmadd_v4df_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512vl_fmadd_v2df_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fmadd_v2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fmadd_v2df_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512f_fmadd_v16sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v16sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v8sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v8sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v8df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v8df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v2df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v2df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmsub_v16sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmsub_v16sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmsub_v8sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmsub_v8sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmsub_v8sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fmsub_v4sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmsub_v4sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmsub_v4sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fmsub_v8df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmsub_v8df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmsub_v4df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmsub_v4df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmsub_v4df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fmsub_v2df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmsub_v2df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmsub_v2df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_avx512f_fmsub_v16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsub_v16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v8sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v4sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsub_v8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsub_v8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v4df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v2df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsub_v16sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsub_v16sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v8sf_mask3 (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fmsub_v8sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fmsub_v8sf_mask3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512vl_fmsub_v4sf_mask3 (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fmsub_v4sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fmsub_v4sf_mask3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512f_fmsub_v8df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsub_v8df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsub_v4df_mask3 (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fmsub_v4df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fmsub_v4df_mask3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512vl_fmsub_v2df_mask3 (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fmsub_v2df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fmsub_v2df_mask3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_fma_fnmadd_v16sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fnmadd_v16sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fnmadd_v8sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fnmadd_v8sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fnmadd_v8sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fnmadd_v4sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fnmadd_v4sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fnmadd_v4sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fnmadd_v8df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fnmadd_v8df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fnmadd_v4df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fnmadd_v4df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fnmadd_v4df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fnmadd_v2df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fnmadd_v2df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fnmadd_v2df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_avx512f_fnmadd_v16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fnmadd_v16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fnmadd_v8sf_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fnmadd_v8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fnmadd_v8sf_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512vl_fnmadd_v4sf_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fnmadd_v4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fnmadd_v4sf_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512f_fnmadd_v8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fnmadd_v8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fnmadd_v4df_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fnmadd_v4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fnmadd_v4df_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512vl_fnmadd_v2df_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fnmadd_v2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fnmadd_v2df_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512f_fnmadd_v16sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fnmadd_v16sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fnmadd_v8sf_mask3 (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fnmadd_v8sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fnmadd_v8sf_mask3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512vl_fnmadd_v4sf_mask3 (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fnmadd_v4sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fnmadd_v4sf_mask3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512f_fnmadd_v8df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fnmadd_v8df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fnmadd_v4df_mask3 (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fnmadd_v4df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fnmadd_v4df_mask3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512vl_fnmadd_v2df_mask3 (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fnmadd_v2df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fnmadd_v2df_mask3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_fma_fnmsub_v16sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fnmsub_v16sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fnmsub_v8sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fnmsub_v8sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fnmsub_v8sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fnmsub_v4sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fnmsub_v4sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fnmsub_v4sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fnmsub_v8df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fnmsub_v8df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fnmsub_v4df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fnmsub_v4df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fnmsub_v4df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fnmsub_v2df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fnmsub_v2df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fnmsub_v2df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_avx512f_fnmsub_v16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fnmsub_v16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fnmsub_v8sf_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fnmsub_v8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fnmsub_v8sf_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512vl_fnmsub_v4sf_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fnmsub_v4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fnmsub_v4sf_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512f_fnmsub_v8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fnmsub_v8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fnmsub_v4df_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fnmsub_v4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fnmsub_v4df_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512vl_fnmsub_v2df_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512vl_fnmsub_v2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512vl_fnmsub_v2df_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
return 0;
}
extern rtx gen_avx512f_fnmsub_v16sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fnmsub_v16sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fnmsub_v8sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fnmsub_v8sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fnmsub_v4sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fnmsub_v4sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fnmsub_v8df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fnmsub_v8df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fnmsub_v4df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fnmsub_v4df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fnmsub_v2df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fnmsub_v2df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmaddsub_v16sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmaddsub_v16sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmaddsub_v8sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmaddsub_v8sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmaddsub_v8sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fmaddsub_v4sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmaddsub_v4sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmaddsub_v4sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fmaddsub_v8df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmaddsub_v8df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmaddsub_v4df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmaddsub_v4df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmaddsub_v4df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fmaddsub_v2df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmaddsub_v2df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmaddsub_v2df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_avx512f_fmaddsub_v16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmaddsub_v16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v8sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v4sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmaddsub_v8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmaddsub_v8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v4df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v2df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmaddsub_v16sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmaddsub_v16sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v8sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v8sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v4sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v4sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmaddsub_v8df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmaddsub_v8df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v4df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v4df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v2df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v2df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmsubadd_v16sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmsubadd_v16sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmsubadd_v8sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmsubadd_v8sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmsubadd_v8sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fmsubadd_v4sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmsubadd_v4sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmsubadd_v4sf_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fmsubadd_v8df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmsubadd_v8df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma_fmsubadd_v4df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmsubadd_v4df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmsubadd_v4df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_fma_fmsubadd_v2df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_fma_fmsubadd_v2df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fma_fmsubadd_v2df_maskz_1_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
return 0;
}
extern rtx gen_avx512f_fmsubadd_v16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsubadd_v16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsubadd_v8sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsubadd_v8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsubadd_v4sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsubadd_v4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsubadd_v8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsubadd_v8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsubadd_v4df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsubadd_v4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsubadd_v2df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsubadd_v2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsubadd_v16sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsubadd_v16sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsubadd_v8sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsubadd_v8sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsubadd_v4sf_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsubadd_v4sf_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsubadd_v8df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmsubadd_v8df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsubadd_v4df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsubadd_v4df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsubadd_v2df_mask3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmsubadd_v2df_mask3_round (rtx, rtx, rtx, rtx, rtx, rtx);
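/* Editor's note: the operand counts can be read directly off the prototype
   pairings above.  A `_round' suffix appends exactly one rtx operand (the
   rounding-mode selector) relative to its non-round counterpart; `_mask' and
   `_mask3' forms append the mask register, plus a separate merge operand for
   patterns whose destination is not already merged from one of the inputs;
   the `_maskz_1' zero-masking forms likewise thread the mask through an
   extra operand.  */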
extern rtx gen_sse_cvtpi2ps (rtx, rtx, rtx);
extern rtx gen_sse_cvtps2pi (rtx, rtx);
extern rtx gen_sse_cvttps2pi (rtx, rtx);
extern rtx gen_sse_cvtsi2ss (rtx, rtx, rtx);
extern rtx gen_sse_cvtsi2ss_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse_cvtsi2ssq (rtx, rtx, rtx);
extern rtx gen_sse_cvtsi2ssq_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse_cvtss2si (rtx, rtx);
extern rtx gen_sse_cvtss2si_round (rtx, rtx, rtx);
extern rtx gen_sse_cvtss2si_2 (rtx, rtx);
extern rtx gen_sse_cvtss2siq (rtx, rtx);
extern rtx gen_sse_cvtss2siq_round (rtx, rtx, rtx);
extern rtx gen_sse_cvtss2siq_2 (rtx, rtx);
extern rtx gen_sse_cvttss2si (rtx, rtx);
extern rtx gen_sse_cvttss2si_round (rtx, rtx, rtx);
extern rtx gen_sse_cvttss2siq (rtx, rtx);
extern rtx gen_sse_cvttss2siq_round (rtx, rtx, rtx);
extern rtx gen_cvtusi2ss32 (rtx, rtx, rtx);
extern rtx gen_cvtusi2ss32_round (rtx, rtx, rtx, rtx);
extern rtx gen_cvtusi2sd32 (rtx, rtx, rtx);
static inline rtx gen_cvtusi2sd32_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_cvtusi2sd32_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
return 0;
}
extern rtx gen_cvtusi2ss64 (rtx, rtx, rtx);
extern rtx gen_cvtusi2ss64_round (rtx, rtx, rtx, rtx);
extern rtx gen_cvtusi2sd64 (rtx, rtx, rtx);
extern rtx gen_cvtusi2sd64_round (rtx, rtx, rtx, rtx);
extern rtx gen_floatv16siv16sf2 (rtx, rtx);
extern rtx gen_floatv16siv16sf2_round (rtx, rtx, rtx);
extern rtx gen_floatv16siv16sf2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_floatv16siv16sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_floatv8siv8sf2 (rtx, rtx);
static inline rtx gen_floatv8siv8sf2_round (rtx, rtx, rtx);
static inline rtx
gen_floatv8siv8sf2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_floatv8siv8sf2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_floatv8siv8sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_floatv8siv8sf2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_floatv4siv4sf2 (rtx, rtx);
static inline rtx gen_floatv4siv4sf2_round (rtx, rtx, rtx);
static inline rtx
gen_floatv4siv4sf2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_floatv4siv4sf2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_floatv4siv4sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_floatv4siv4sf2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_ufloatv16siv16sf2 (rtx, rtx);
extern rtx gen_ufloatv16siv16sf2_round (rtx, rtx, rtx);
extern rtx gen_ufloatv16siv16sf2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv16siv16sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv8siv8sf2 (rtx, rtx);
extern rtx gen_ufloatv8siv8sf2_round (rtx, rtx, rtx);
extern rtx gen_ufloatv8siv8sf2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv8siv8sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv4siv4sf2 (rtx, rtx);
extern rtx gen_ufloatv4siv4sf2_round (rtx, rtx, rtx);
extern rtx gen_ufloatv4siv4sf2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv4siv4sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_fix_notruncv8sfv8si (rtx, rtx);
extern rtx gen_avx_fix_notruncv8sfv8si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_fix_notruncv4sfv4si (rtx, rtx);
extern rtx gen_sse2_fix_notruncv4sfv4si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fix_notruncv16sfv16si (rtx, rtx);
extern rtx gen_avx512f_fix_notruncv16sfv16si_round (rtx, rtx, rtx);
extern rtx gen_avx512f_fix_notruncv16sfv16si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fix_notruncv16sfv16si_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ufix_notruncv16sfv16si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ufix_notruncv16sfv16si_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ufix_notruncv8sfv8si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ufix_notruncv8sfv8si_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ufix_notruncv4sfv4si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ufix_notruncv4sfv4si_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_cvtps2qqv8di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_cvtps2qqv8di_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_cvtps2qqv4di_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_avx512dq_cvtps2qqv4di_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512dq_cvtps2qqv4di_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_avx512dq_cvtps2qqv2di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_cvtps2uqqv8di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_cvtps2uqqv8di_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_cvtps2uqqv4di_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_avx512dq_cvtps2uqqv4di_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512dq_cvtps2uqqv4di_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_avx512dq_cvtps2uqqv2di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_fix_truncv16sfv16si2 (rtx, rtx);
extern rtx gen_fix_truncv16sfv16si2_round (rtx, rtx, rtx);
extern rtx gen_fix_truncv16sfv16si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_fix_truncv16sfv16si2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ufix_truncv16sfv16si2 (rtx, rtx);
extern rtx gen_ufix_truncv16sfv16si2_round (rtx, rtx, rtx);
extern rtx gen_ufix_truncv16sfv16si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufix_truncv16sfv16si2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fix_truncv8sfv8si2 (rtx, rtx);
extern rtx gen_fix_truncv8sfv8si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_fix_truncv4sfv4si2 (rtx, rtx);
extern rtx gen_fix_truncv4sfv4si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_cvtpi2pd (rtx, rtx);
extern rtx gen_sse2_cvtpd2pi (rtx, rtx);
extern rtx gen_sse2_cvttpd2pi (rtx, rtx);
extern rtx gen_sse2_cvtsi2sd (rtx, rtx, rtx);
extern rtx gen_sse2_cvtsi2sdq (rtx, rtx, rtx);
extern rtx gen_sse2_cvtsi2sdq_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vcvtss2usi (rtx, rtx);
extern rtx gen_avx512f_vcvtss2usi_round (rtx, rtx, rtx);
extern rtx gen_avx512f_vcvtss2usiq (rtx, rtx);
extern rtx gen_avx512f_vcvtss2usiq_round (rtx, rtx, rtx);
extern rtx gen_avx512f_vcvttss2usi (rtx, rtx);
extern rtx gen_avx512f_vcvttss2usi_round (rtx, rtx, rtx);
extern rtx gen_avx512f_vcvttss2usiq (rtx, rtx);
extern rtx gen_avx512f_vcvttss2usiq_round (rtx, rtx, rtx);
extern rtx gen_avx512f_vcvtsd2usi (rtx, rtx);
extern rtx gen_avx512f_vcvtsd2usi_round (rtx, rtx, rtx);
extern rtx gen_avx512f_vcvtsd2usiq (rtx, rtx);
extern rtx gen_avx512f_vcvtsd2usiq_round (rtx, rtx, rtx);
extern rtx gen_avx512f_vcvttsd2usi (rtx, rtx);
extern rtx gen_avx512f_vcvttsd2usi_round (rtx, rtx, rtx);
extern rtx gen_avx512f_vcvttsd2usiq (rtx, rtx);
extern rtx gen_avx512f_vcvttsd2usiq_round (rtx, rtx, rtx);
extern rtx gen_sse2_cvtsd2si (rtx, rtx);
extern rtx gen_sse2_cvtsd2si_round (rtx, rtx, rtx);
extern rtx gen_sse2_cvtsd2si_2 (rtx, rtx);
extern rtx gen_sse2_cvtsd2siq (rtx, rtx);
extern rtx gen_sse2_cvtsd2siq_round (rtx, rtx, rtx);
extern rtx gen_sse2_cvtsd2siq_2 (rtx, rtx);
extern rtx gen_sse2_cvttsd2si (rtx, rtx);
extern rtx gen_sse2_cvttsd2si_round (rtx, rtx, rtx);
extern rtx gen_sse2_cvttsd2siq (rtx, rtx);
extern rtx gen_sse2_cvttsd2siq_round (rtx, rtx, rtx);
extern rtx gen_floatv8siv8df2 (rtx, rtx);
extern rtx gen_floatv8siv8df2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_floatv4siv4df2 (rtx, rtx);
extern rtx gen_floatv4siv4df2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_floatv8div8df2 (rtx, rtx);
extern rtx gen_floatv8div8df2_round (rtx, rtx, rtx);
extern rtx gen_floatv8div8df2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_floatv8div8df2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv8div8df2 (rtx, rtx);
extern rtx gen_ufloatv8div8df2_round (rtx, rtx, rtx);
extern rtx gen_ufloatv8div8df2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv8div8df2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_floatv4div4df2 (rtx, rtx);
extern rtx gen_floatv4div4df2_round (rtx, rtx, rtx);
extern rtx gen_floatv4div4df2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_floatv4div4df2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv4div4df2 (rtx, rtx);
extern rtx gen_ufloatv4div4df2_round (rtx, rtx, rtx);
extern rtx gen_ufloatv4div4df2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv4div4df2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_floatv2div2df2 (rtx, rtx);
extern rtx gen_floatv2div2df2_round (rtx, rtx, rtx);
extern rtx gen_floatv2div2df2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_floatv2div2df2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv2div2df2 (rtx, rtx);
extern rtx gen_ufloatv2div2df2_round (rtx, rtx, rtx);
extern rtx gen_ufloatv2div2df2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv2div2df2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_floatv8div8sf2 (rtx, rtx);
extern rtx gen_floatv8div8sf2_round (rtx, rtx, rtx);
extern rtx gen_floatv8div8sf2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_floatv8div8sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv8div8sf2 (rtx, rtx);
extern rtx gen_ufloatv8div8sf2_round (rtx, rtx, rtx);
extern rtx gen_ufloatv8div8sf2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv8div8sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_floatv4div4sf2 (rtx, rtx);
static inline rtx gen_floatv4div4sf2_round (rtx, rtx, rtx);
static inline rtx
gen_floatv4div4sf2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_floatv4div4sf2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_floatv4div4sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_floatv4div4sf2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_ufloatv4div4sf2 (rtx, rtx);
static inline rtx gen_ufloatv4div4sf2_round (rtx, rtx, rtx);
static inline rtx
gen_ufloatv4div4sf2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_ufloatv4div4sf2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_ufloatv4div4sf2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ufloatv4div4sf2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_floatv2div2sf2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv2div2sf2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv8siv8df2 (rtx, rtx);
extern rtx gen_ufloatv8siv8df2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv4siv4df2 (rtx, rtx);
extern rtx gen_ufloatv4siv4df2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufloatv2siv2df2 (rtx, rtx);
extern rtx gen_ufloatv2siv2df2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cvtdq2pd512_2 (rtx, rtx);
extern rtx gen_avx_cvtdq2pd256_2 (rtx, rtx);
extern rtx gen_sse2_cvtdq2pd (rtx, rtx);
extern rtx gen_sse2_cvtdq2pd_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cvtpd2dq512 (rtx, rtx);
extern rtx gen_avx512f_cvtpd2dq512_round (rtx, rtx, rtx);
extern rtx gen_avx512f_cvtpd2dq512_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cvtpd2dq512_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_cvtpd2dq256 (rtx, rtx);
extern rtx gen_avx_cvtpd2dq256_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_cvtpd2dq (rtx, rtx);
extern rtx gen_sse2_cvtpd2dq_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufix_notruncv8dfv8si2 (rtx, rtx);
extern rtx gen_ufix_notruncv8dfv8si2_round (rtx, rtx, rtx);
extern rtx gen_ufix_notruncv8dfv8si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufix_notruncv8dfv8si2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ufix_notruncv4dfv4si2 (rtx, rtx);
extern rtx gen_ufix_notruncv4dfv4si2_round (rtx, rtx, rtx);
extern rtx gen_ufix_notruncv4dfv4si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufix_notruncv4dfv4si2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ufix_notruncv2dfv2si2 (rtx, rtx);
extern rtx gen_ufix_notruncv2dfv2si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_fix_truncv8dfv8si2 (rtx, rtx);
extern rtx gen_fix_truncv8dfv8si2_round (rtx, rtx, rtx);
extern rtx gen_fix_truncv8dfv8si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_fix_truncv8dfv8si2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ufix_truncv8dfv8si2 (rtx, rtx);
extern rtx gen_ufix_truncv8dfv8si2_round (rtx, rtx, rtx);
extern rtx gen_ufix_truncv8dfv8si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufix_truncv8dfv8si2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ufix_truncv2dfv2si2 (rtx, rtx);
extern rtx gen_ufix_truncv2dfv2si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_fix_truncv4dfv4si2 (rtx, rtx);
extern rtx gen_fix_truncv4dfv4si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufix_truncv4dfv4si2 (rtx, rtx);
extern rtx gen_ufix_truncv4dfv4si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_fix_truncv8dfv8di2 (rtx, rtx);
extern rtx gen_fix_truncv8dfv8di2_round (rtx, rtx, rtx);
extern rtx gen_fix_truncv8dfv8di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_fix_truncv8dfv8di2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ufix_truncv8dfv8di2 (rtx, rtx);
extern rtx gen_ufix_truncv8dfv8di2_round (rtx, rtx, rtx);
extern rtx gen_ufix_truncv8dfv8di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufix_truncv8dfv8di2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fix_truncv4dfv4di2 (rtx, rtx);
static inline rtx gen_fix_truncv4dfv4di2_round (rtx, rtx, rtx);
static inline rtx
gen_fix_truncv4dfv4di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_fix_truncv4dfv4di2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_fix_truncv4dfv4di2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fix_truncv4dfv4di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_ufix_truncv4dfv4di2 (rtx, rtx);
static inline rtx gen_ufix_truncv4dfv4di2_round (rtx, rtx, rtx);
static inline rtx
gen_ufix_truncv4dfv4di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_ufix_truncv4dfv4di2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_ufix_truncv4dfv4di2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ufix_truncv4dfv4di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_fix_truncv2dfv2di2 (rtx, rtx);
static inline rtx gen_fix_truncv2dfv2di2_round (rtx, rtx, rtx);
static inline rtx
gen_fix_truncv2dfv2di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_fix_truncv2dfv2di2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_fix_truncv2dfv2di2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fix_truncv2dfv2di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_ufix_truncv2dfv2di2 (rtx, rtx);
static inline rtx gen_ufix_truncv2dfv2di2_round (rtx, rtx, rtx);
static inline rtx
gen_ufix_truncv2dfv2di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_ufix_truncv2dfv2di2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_ufix_truncv2dfv2di2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ufix_truncv2dfv2di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_fix_notruncv8dfv8di2 (rtx, rtx);
extern rtx gen_fix_notruncv8dfv8di2_round (rtx, rtx, rtx);
extern rtx gen_fix_notruncv8dfv8di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_fix_notruncv8dfv8di2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fix_notruncv4dfv4di2 (rtx, rtx);
static inline rtx gen_fix_notruncv4dfv4di2_round (rtx, rtx, rtx);
static inline rtx
gen_fix_notruncv4dfv4di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_fix_notruncv4dfv4di2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_fix_notruncv4dfv4di2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fix_notruncv4dfv4di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_fix_notruncv2dfv2di2 (rtx, rtx);
static inline rtx gen_fix_notruncv2dfv2di2_round (rtx, rtx, rtx);
static inline rtx
gen_fix_notruncv2dfv2di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_fix_notruncv2dfv2di2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_fix_notruncv2dfv2di2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fix_notruncv2dfv2di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_ufix_notruncv8dfv8di2 (rtx, rtx);
extern rtx gen_ufix_notruncv8dfv8di2_round (rtx, rtx, rtx);
extern rtx gen_ufix_notruncv8dfv8di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufix_notruncv8dfv8di2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ufix_notruncv4dfv4di2 (rtx, rtx);
static inline rtx gen_ufix_notruncv4dfv4di2_round (rtx, rtx, rtx);
static inline rtx
gen_ufix_notruncv4dfv4di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_ufix_notruncv4dfv4di2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_ufix_notruncv4dfv4di2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ufix_notruncv4dfv4di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_ufix_notruncv2dfv2di2 (rtx, rtx);
static inline rtx gen_ufix_notruncv2dfv2di2_round (rtx, rtx, rtx);
static inline rtx
gen_ufix_notruncv2dfv2di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_ufix_notruncv2dfv2di2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_ufix_notruncv2dfv2di2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ufix_notruncv2dfv2di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_fix_truncv8sfv8di2 (rtx, rtx);
extern rtx gen_fix_truncv8sfv8di2_round (rtx, rtx, rtx);
extern rtx gen_fix_truncv8sfv8di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_fix_truncv8sfv8di2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ufix_truncv8sfv8di2 (rtx, rtx);
extern rtx gen_ufix_truncv8sfv8di2_round (rtx, rtx, rtx);
extern rtx gen_ufix_truncv8sfv8di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufix_truncv8sfv8di2_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fix_truncv4sfv4di2 (rtx, rtx);
static inline rtx gen_fix_truncv4sfv4di2_round (rtx, rtx, rtx);
static inline rtx
gen_fix_truncv4sfv4di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_fix_truncv4sfv4di2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_fix_truncv4sfv4di2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_fix_truncv4sfv4di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_ufix_truncv4sfv4di2 (rtx, rtx);
static inline rtx gen_ufix_truncv4sfv4di2_round (rtx, rtx, rtx);
static inline rtx
gen_ufix_truncv4sfv4di2_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_ufix_truncv4sfv4di2_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_ufix_truncv4sfv4di2_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_ufix_truncv4sfv4di2_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
extern rtx gen_fix_truncv2sfv2di2 (rtx, rtx);
extern rtx gen_fix_truncv2sfv2di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufix_truncv2sfv2di2 (rtx, rtx);
extern rtx gen_ufix_truncv2sfv2di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufix_truncv8sfv8si2 (rtx, rtx);
extern rtx gen_ufix_truncv8sfv8si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_ufix_truncv4sfv4si2 (rtx, rtx);
extern rtx gen_ufix_truncv4sfv4si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_cvttpd2dq (rtx, rtx);
extern rtx gen_sse2_cvttpd2dq_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_cvtsd2ss (rtx, rtx, rtx);
extern rtx gen_sse2_cvtsd2ss_round (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_cvtss2sd (rtx, rtx, rtx);
extern rtx gen_sse2_cvtss2sd_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cvtpd2ps512_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cvtpd2ps512_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_cvtpd2ps256 (rtx, rtx);
extern rtx gen_avx_cvtpd2ps256_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cvtps2pd512 (rtx, rtx);
extern rtx gen_avx512f_cvtps2pd512_round (rtx, rtx, rtx);
extern rtx gen_avx512f_cvtps2pd512_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_cvtps2pd512_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_cvtps2pd256 (rtx, rtx);
static inline rtx gen_avx_cvtps2pd256_round (rtx, rtx, rtx);
static inline rtx
gen_avx_cvtps2pd256_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
{
return 0;
}
extern rtx gen_avx_cvtps2pd256_mask (rtx, rtx, rtx, rtx);
static inline rtx gen_avx_cvtps2pd256_mask_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx_cvtps2pd256_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
return 0;
}
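/* Editor's note: a minimal usage sketch, not part of the generated file.
   GCC-internal callers (including plugins built against this header) guard
   each generator with the matching HAVE_* macro from insn-flags.h, so the
   `return 0' stubs above are never reached at run time:

       rtx dst = ..., src = ...;           suitably-moded REG/MEM rtxes
       if (HAVE_avx_cvtps2pd256)           compile-time 0 for stubbed forms
         emit_insn (gen_avx_cvtps2pd256 (dst, src));

   gen_avx_cvtps2pd256 and the HAVE_* macros come from this header and
   emit_insn from GCC's rtl machinery; the surrounding operand setup is
   assumed, not shown.  */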
extern rtx gen_vec_unpacks_lo_v16sf (rtx, rtx);
extern rtx gen_avx512bw_cvtb2maskv64qi (rtx, rtx);
extern rtx gen_avx512vl_cvtb2maskv16qi (rtx, rtx);
extern rtx gen_avx512vl_cvtb2maskv32qi (rtx, rtx);
extern rtx gen_avx512bw_cvtw2maskv32hi (rtx, rtx);
extern rtx gen_avx512vl_cvtw2maskv16hi (rtx, rtx);
extern rtx gen_avx512vl_cvtw2maskv8hi (rtx, rtx);
extern rtx gen_avx512f_cvtd2maskv16si (rtx, rtx);
extern rtx gen_avx512vl_cvtd2maskv8si (rtx, rtx);
extern rtx gen_avx512vl_cvtd2maskv4si (rtx, rtx);
extern rtx gen_avx512f_cvtq2maskv8di (rtx, rtx);
extern rtx gen_avx512vl_cvtq2maskv4di (rtx, rtx);
extern rtx gen_avx512vl_cvtq2maskv2di (rtx, rtx);
extern rtx gen_sse2_cvtps2pd (rtx, rtx);
extern rtx gen_sse2_cvtps2pd_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse_movhlps (rtx, rtx, rtx);
extern rtx gen_sse_movlhps (rtx, rtx, rtx);
extern rtx gen_avx512f_unpckhps512_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_unpckhps256 (rtx, rtx, rtx);
extern rtx gen_avx_unpckhps256_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv4sf (rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv4sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_unpcklps512_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_unpcklps256 (rtx, rtx, rtx);
extern rtx gen_avx_unpcklps256_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_unpcklps128_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv4sf (rtx, rtx, rtx);
extern rtx gen_avx_movshdup256 (rtx, rtx);
extern rtx gen_avx_movshdup256_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse3_movshdup (rtx, rtx);
extern rtx gen_sse3_movshdup_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_movshdup512_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx_movsldup256 (rtx, rtx);
extern rtx gen_avx_movsldup256_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse3_movsldup (rtx, rtx);
extern rtx gen_sse3_movsldup_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_movsldup512_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx_shufps256_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_shufps256_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse_shufps_v4sf_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse_shufps_v4si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse_shufps_v4sf (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse_storehps (rtx, rtx);
extern rtx gen_sse_loadhps (rtx, rtx, rtx);
extern rtx gen_sse_storelps (rtx, rtx);
extern rtx gen_sse_loadlps (rtx, rtx, rtx);
extern rtx gen_sse_movss (rtx, rtx, rtx);
extern rtx gen_avx2_vec_dupv8sf (rtx, rtx);
extern rtx gen_avx2_vec_dupv4sf (rtx, rtx);
extern rtx gen_avx2_vec_dupv8sf_1 (rtx, rtx);
extern rtx gen_avx512f_vec_dupv16sf_1 (rtx, rtx);
extern rtx gen_avx512f_vec_dupv8df_1 (rtx, rtx);
extern rtx gen_vec_setv4si_0 (rtx, rtx, rtx);
extern rtx gen_vec_setv4sf_0 (rtx, rtx, rtx);
extern rtx gen_sse4_1_insertps (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_vextractf64x2_1_maskm (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_vextracti64x2_1_maskm (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vextractf32x4_1_maskm (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vextracti32x4_1_maskm (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_vextractf64x2_1_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_vextracti64x2_1_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vextractf32x4_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vextracti32x4_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_lo_v8df_maskm (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_lo_v8di_maskm (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_lo_v8df (rtx, rtx);
extern rtx gen_vec_extract_lo_v8df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_lo_v8di (rtx, rtx);
extern rtx gen_vec_extract_lo_v8di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_hi_v8df_maskm (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_hi_v8di_maskm (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_hi_v8df (rtx, rtx);
extern rtx gen_vec_extract_hi_v8df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_hi_v8di (rtx, rtx);
extern rtx gen_vec_extract_hi_v8di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_hi_v16sf_maskm (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_hi_v16si_maskm (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_hi_v16sf (rtx, rtx);
extern rtx gen_vec_extract_hi_v16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_hi_v16si (rtx, rtx);
extern rtx gen_vec_extract_hi_v16si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_lo_v16sf (rtx, rtx);
extern rtx gen_vec_extract_lo_v16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_lo_v16si (rtx, rtx);
extern rtx gen_vec_extract_lo_v16si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_lo_v4di (rtx, rtx);
extern rtx gen_vec_extract_lo_v4di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_lo_v4df (rtx, rtx);
extern rtx gen_vec_extract_lo_v4df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_hi_v4di (rtx, rtx);
extern rtx gen_vec_extract_hi_v4di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_hi_v4df (rtx, rtx);
extern rtx gen_vec_extract_hi_v4df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_lo_v8si (rtx, rtx);
extern rtx gen_vec_extract_lo_v8si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_lo_v8sf (rtx, rtx);
extern rtx gen_vec_extract_lo_v8sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_lo_v8si_maskm (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_lo_v8sf_maskm (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_hi_v8si_maskm (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_hi_v8sf_maskm (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_hi_v8si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_hi_v8sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_extract_hi_v8si (rtx, rtx);
extern rtx gen_vec_extract_hi_v8sf (rtx, rtx);
extern rtx gen_vec_extract_lo_v32hi (rtx, rtx);
extern rtx gen_vec_extract_hi_v32hi (rtx, rtx);
extern rtx gen_vec_extract_lo_v16hi (rtx, rtx);
extern rtx gen_vec_extract_hi_v16hi (rtx, rtx);
extern rtx gen_vec_extract_lo_v64qi (rtx, rtx);
extern rtx gen_vec_extract_hi_v64qi (rtx, rtx);
extern rtx gen_vec_extract_lo_v32qi (rtx, rtx);
extern rtx gen_vec_extract_hi_v32qi (rtx, rtx);
extern rtx gen_avx512f_unpckhpd512_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_unpckhpd256 (rtx, rtx, rtx);
extern rtx gen_avx_unpckhpd256_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_unpckhpd128_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_unpcklpd128_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmscalefv4sf (rtx, rtx, rtx);
extern rtx gen_avx512f_vmscalefv4sf_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vmscalefv2df (rtx, rtx, rtx);
extern rtx gen_avx512f_vmscalefv2df_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_scalefv16sf (rtx, rtx, rtx);
extern rtx gen_avx512f_scalefv16sf_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_scalefv16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_scalefv16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scalefv8sf (rtx, rtx, rtx);
extern rtx gen_avx512vl_scalefv8sf_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scalefv8sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scalefv8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scalefv4sf (rtx, rtx, rtx);
extern rtx gen_avx512vl_scalefv4sf_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scalefv4sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scalefv4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_scalefv8df (rtx, rtx, rtx);
extern rtx gen_avx512f_scalefv8df_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_scalefv8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_scalefv8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scalefv4df (rtx, rtx, rtx);
extern rtx gen_avx512vl_scalefv4df_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scalefv4df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scalefv4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scalefv2df (rtx, rtx, rtx);
extern rtx gen_avx512vl_scalefv2df_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scalefv2df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scalefv2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vternlogv16si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vternlogv16si_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv8si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv8si_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv4si_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vternlogv8di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vternlogv8di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv4di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv4di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv2di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vternlogv16si_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv8si_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv4si_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vternlogv8di_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv4di_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv2di_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_getexpv16sf (rtx, rtx);
extern rtx gen_avx512f_getexpv16sf_round (rtx, rtx, rtx);
extern rtx gen_avx512f_getexpv16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_getexpv16sf_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getexpv8sf (rtx, rtx);
extern rtx gen_avx512vl_getexpv8sf_round (rtx, rtx, rtx);
extern rtx gen_avx512vl_getexpv8sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getexpv8sf_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getexpv4sf (rtx, rtx);
extern rtx gen_avx512vl_getexpv4sf_round (rtx, rtx, rtx);
extern rtx gen_avx512vl_getexpv4sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getexpv4sf_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_getexpv8df (rtx, rtx);
extern rtx gen_avx512f_getexpv8df_round (rtx, rtx, rtx);
extern rtx gen_avx512f_getexpv8df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_getexpv8df_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getexpv4df (rtx, rtx);
extern rtx gen_avx512vl_getexpv4df_round (rtx, rtx, rtx);
extern rtx gen_avx512vl_getexpv4df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getexpv4df_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getexpv2df (rtx, rtx);
extern rtx gen_avx512vl_getexpv2df_round (rtx, rtx, rtx);
extern rtx gen_avx512vl_getexpv2df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getexpv2df_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sgetexpv4sf (rtx, rtx, rtx);
extern rtx gen_avx512f_sgetexpv4sf_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sgetexpv2df (rtx, rtx, rtx);
extern rtx gen_avx512f_sgetexpv2df_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_alignv16si_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_alignv8si_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_alignv4si_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_alignv8di_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_alignv4di_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_alignv2di_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv16sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv16sf_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv16sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv16sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv8sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv8sf_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv8sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv8sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4sf_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv8df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv8df_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv8df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv8df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4df_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv2df_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv2df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv2df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv16sf_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv8sf_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4sf_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv8df_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4df_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv2df_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv4sf_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv4sf_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv4sf_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv2df_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv2df_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv2df_maskz_1_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv4sf_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv2df_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rndscalev16sf (rtx, rtx, rtx);
extern rtx gen_avx512f_rndscalev16sf_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rndscalev16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rndscalev16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rndscalev8sf (rtx, rtx, rtx);
extern rtx gen_avx512vl_rndscalev8sf_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rndscalev8sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rndscalev8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rndscalev4sf (rtx, rtx, rtx);
extern rtx gen_avx512vl_rndscalev4sf_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rndscalev4sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rndscalev4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rndscalev8df (rtx, rtx, rtx);
extern rtx gen_avx512f_rndscalev8df_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rndscalev8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rndscalev8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rndscalev4df (rtx, rtx, rtx);
extern rtx gen_avx512vl_rndscalev4df_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rndscalev4df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rndscalev4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rndscalev2df (rtx, rtx, rtx);
extern rtx gen_avx512vl_rndscalev2df_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rndscalev2df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rndscalev2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rndscalev4sf (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rndscalev4sf_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rndscalev2df (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rndscalev2df_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shufps512_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shufps512_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shufpd512_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shufpd512_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_shufpd256_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_shufpd256_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_shufpd_v2df_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_interleave_highv4di (rtx, rtx, rtx);
extern rtx gen_avx2_interleave_highv4di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_interleave_highv8di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv2di (rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv2di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_interleave_lowv4di (rtx, rtx, rtx);
extern rtx gen_avx2_interleave_lowv4di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_interleave_lowv8di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv2di (rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv2di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_shufpd_v2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_shufpd_v2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_storehpd (rtx, rtx);
extern rtx gen_sse2_storelpd (rtx, rtx);
extern rtx gen_sse2_loadhpd (rtx, rtx, rtx);
extern rtx gen_sse2_loadlpd (rtx, rtx, rtx);
extern rtx gen_sse2_movsd (rtx, rtx, rtx);
extern rtx gen_vec_dupv2df (rtx, rtx);
extern rtx gen_vec_dupv2df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_concatv2df (rtx, rtx, rtx);
extern rtx gen_avx512f_ss_truncatev16siv16qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_truncatev16siv16qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_us_truncatev16siv16qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ss_truncatev16siv16hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_truncatev16siv16hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_us_truncatev16siv16hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ss_truncatev8div8si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_truncatev8div8si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_us_truncatev8div8si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ss_truncatev8div8hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_truncatev8div8hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_us_truncatev8div8hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_ss_truncatev32hiv32qi2 (rtx, rtx);
extern rtx gen_avx512bw_truncatev32hiv32qi2 (rtx, rtx);
extern rtx gen_avx512bw_us_truncatev32hiv32qi2 (rtx, rtx);
extern rtx gen_avx512bw_ss_truncatev32hiv32qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_truncatev32hiv32qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_us_truncatev32hiv32qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev4div4si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev4div4si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev4div4si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev8siv8hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev8siv8hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev8siv8hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev16hiv16qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev16hiv16qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev16hiv16qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev2div2qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev2div2qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev2div2qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev2div2qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev2div2qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev2div2qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev4siv4qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev4siv4qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev4siv4qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev4div4qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev4div4qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev4div4qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev4siv4qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev4siv4qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev4siv4qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev4div4qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev4div4qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev4div4qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev8hiv8qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev8hiv8qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev8hiv8qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev8siv8qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev8siv8qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev8siv8qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev8hiv8qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev8hiv8qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev8hiv8qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev8siv8qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev8siv8qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev8siv8qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev4siv4hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev4siv4hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev4siv4hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev4div4hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev4div4hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev4div4hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev4siv4hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev4siv4hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev4siv4hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev4div4hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev4div4hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev4div4hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev2div2hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev2div2hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev2div2hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev2div2hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev2div2hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev2div2hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev2div2si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev2div2si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev2div2si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev2div2si2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev2div2si2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev2div2si2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_ss_truncatev8div16qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_truncatev8div16qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_us_truncatev8div16qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ss_truncatev8div16qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_truncatev8div16qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_us_truncatev8div16qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512bw_pmaddwd512v32hi (rtx, rtx, rtx);
extern rtx gen_avx512bw_pmaddwd512v32hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_pmaddwd512v16hi (rtx, rtx, rtx);
extern rtx gen_avx512bw_pmaddwd512v16hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_pmaddwd512v8hi (rtx, rtx, rtx);
extern rtx gen_avx512bw_pmaddwd512v8hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_mulv8di3 (rtx, rtx, rtx);
extern rtx gen_avx512dq_mulv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_mulv4di3 (rtx, rtx, rtx);
extern rtx gen_avx512dq_mulv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_mulv2di3 (rtx, rtx, rtx);
extern rtx gen_avx512dq_mulv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashrv16hi3 (rtx, rtx, rtx);
extern rtx gen_ashrv8hi3 (rtx, rtx, rtx);
extern rtx gen_ashrv8si3 (rtx, rtx, rtx);
extern rtx gen_ashrv4si3 (rtx, rtx, rtx);
extern rtx gen_ashrv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashrv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashrv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashrv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashrv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashrv32hi3 (rtx, rtx, rtx);
extern rtx gen_ashrv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashrv4di3 (rtx, rtx, rtx);
extern rtx gen_ashrv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashrv16si3 (rtx, rtx, rtx);
extern rtx gen_ashrv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashrv8di3 (rtx, rtx, rtx);
extern rtx gen_ashrv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashlv32hi3 (rtx, rtx, rtx);
extern rtx gen_ashlv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_lshrv32hi3 (rtx, rtx, rtx);
extern rtx gen_lshrv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashlv16hi3 (rtx, rtx, rtx);
extern rtx gen_ashlv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_lshrv16hi3 (rtx, rtx, rtx);
extern rtx gen_lshrv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashlv8hi3 (rtx, rtx, rtx);
extern rtx gen_ashlv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_lshrv8hi3 (rtx, rtx, rtx);
extern rtx gen_lshrv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashlv8si3 (rtx, rtx, rtx);
extern rtx gen_ashlv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_lshrv8si3 (rtx, rtx, rtx);
extern rtx gen_lshrv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashlv4si3 (rtx, rtx, rtx);
extern rtx gen_ashlv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_lshrv4si3 (rtx, rtx, rtx);
extern rtx gen_lshrv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashlv4di3 (rtx, rtx, rtx);
extern rtx gen_ashlv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_lshrv4di3 (rtx, rtx, rtx);
extern rtx gen_lshrv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashlv2di3 (rtx, rtx, rtx);
extern rtx gen_ashlv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_lshrv2di3 (rtx, rtx, rtx);
extern rtx gen_lshrv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashlv16si3 (rtx, rtx, rtx);
extern rtx gen_ashlv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_lshrv16si3 (rtx, rtx, rtx);
extern rtx gen_lshrv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ashlv8di3 (rtx, rtx, rtx);
extern rtx gen_ashlv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_lshrv8di3 (rtx, rtx, rtx);
extern rtx gen_lshrv8di3_mask (rtx, rtx, rtx, rtx, rtx);
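/* Editor's note: names such as `ashlv16si3' above follow GCC's standard
   optab naming scheme: an operation code (`ashl' = arithmetic shift left,
   `lshr' = logical shift right, `ashr' = arithmetic shift right), the
   machine mode of the operands (`v16si'), and the operand count (`3':
   destination plus two sources), matching the three-rtx prototypes; the
   `_mask' variants extend these with the AVX-512 merge and mask operands.  */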
extern rtx gen_avx512bw_ashlv4ti3 (rtx, rtx, rtx);
extern rtx gen_avx2_ashlv2ti3 (rtx, rtx, rtx);
extern rtx gen_sse2_ashlv1ti3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_lshrv4ti3 (rtx, rtx, rtx);
extern rtx gen_avx2_lshrv2ti3 (rtx, rtx, rtx);
extern rtx gen_sse2_lshrv1ti3 (rtx, rtx, rtx);
extern rtx gen_avx512f_rolvv16si (rtx, rtx, rtx);
extern rtx gen_avx512f_rolvv16si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rorvv16si (rtx, rtx, rtx);
extern rtx gen_avx512f_rorvv16si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rolvv8si (rtx, rtx, rtx);
extern rtx gen_avx512vl_rolvv8si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rorvv8si (rtx, rtx, rtx);
extern rtx gen_avx512vl_rorvv8si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rolvv4si (rtx, rtx, rtx);
extern rtx gen_avx512vl_rolvv4si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rorvv4si (rtx, rtx, rtx);
extern rtx gen_avx512vl_rorvv4si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rolvv8di (rtx, rtx, rtx);
extern rtx gen_avx512f_rolvv8di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rorvv8di (rtx, rtx, rtx);
extern rtx gen_avx512f_rorvv8di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rolvv4di (rtx, rtx, rtx);
extern rtx gen_avx512vl_rolvv4di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rorvv4di (rtx, rtx, rtx);
extern rtx gen_avx512vl_rorvv4di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rolvv2di (rtx, rtx, rtx);
extern rtx gen_avx512vl_rolvv2di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rorvv2di (rtx, rtx, rtx);
extern rtx gen_avx512vl_rorvv2di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rolv16si (rtx, rtx, rtx);
extern rtx gen_avx512f_rolv16si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rorv16si (rtx, rtx, rtx);
extern rtx gen_avx512f_rorv16si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rolv8si (rtx, rtx, rtx);
extern rtx gen_avx512vl_rolv8si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rorv8si (rtx, rtx, rtx);
extern rtx gen_avx512vl_rorv8si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rolv4si (rtx, rtx, rtx);
extern rtx gen_avx512vl_rolv4si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rorv4si (rtx, rtx, rtx);
extern rtx gen_avx512vl_rorv4si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rolv8di (rtx, rtx, rtx);
extern rtx gen_avx512f_rolv8di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_rorv8di (rtx, rtx, rtx);
extern rtx gen_avx512f_rorv8di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rolv4di (rtx, rtx, rtx);
extern rtx gen_avx512vl_rolv4di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rorv4di (rtx, rtx, rtx);
extern rtx gen_avx512vl_rorv4di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rolv2di (rtx, rtx, rtx);
extern rtx gen_avx512vl_rolv2di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_rorv2di (rtx, rtx, rtx);
extern rtx gen_avx512vl_rorv2di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_umaxv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_uminv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_umaxv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_uminv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_umaxv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_uminv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_umaxv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_uminv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_umaxv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_uminv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_umaxv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_uminv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_eqv64qi3_1 (rtx, rtx, rtx);
extern rtx gen_avx512bw_eqv64qi3_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv16qi3_1 (rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv16qi3_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv32qi3_1 (rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv32qi3_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_eqv32hi3_1 (rtx, rtx, rtx);
extern rtx gen_avx512bw_eqv32hi3_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv16hi3_1 (rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv16hi3_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv8hi3_1 (rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv8hi3_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_eqv16si3_1 (rtx, rtx, rtx);
extern rtx gen_avx512f_eqv16si3_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv8si3_1 (rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv8si3_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv4si3_1 (rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv4si3_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_eqv8di3_1 (rtx, rtx, rtx);
extern rtx gen_avx512f_eqv8di3_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv4di3_1 (rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv4di3_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv2di3_1 (rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv2di3_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_2_gtv2di3 (rtx, rtx, rtx);
extern rtx gen_avx2_gtv32qi3 (rtx, rtx, rtx);
extern rtx gen_avx2_gtv16hi3 (rtx, rtx, rtx);
extern rtx gen_avx2_gtv8si3 (rtx, rtx, rtx);
extern rtx gen_avx2_gtv4di3 (rtx, rtx, rtx);
extern rtx gen_avx512f_gtv16si3 (rtx, rtx, rtx);
extern rtx gen_avx512f_gtv16si3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gtv8si3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_gtv8si3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gtv4si3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_gtv4si3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_gtv8di3 (rtx, rtx, rtx);
extern rtx gen_avx512f_gtv8di3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gtv4di3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_gtv4di3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gtv2di3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_gtv2di3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_gtv64qi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_gtv64qi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gtv16qi3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_gtv16qi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gtv32qi3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_gtv32qi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_gtv32hi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_gtv32hi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gtv16hi3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_gtv16hi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gtv8hi3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_gtv8hi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_gtv16qi3 (rtx, rtx, rtx);
extern rtx gen_sse2_gtv8hi3 (rtx, rtx, rtx);
extern rtx gen_sse2_gtv4si3 (rtx, rtx, rtx);
extern rtx gen_andv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_andv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_andv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_andv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_andv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_andv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_testmv64qi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_testmv64qi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_testmv16qi3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_testmv16qi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_testmv32qi3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_testmv32qi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_testmv32hi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_testmv32hi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_testmv16hi3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_testmv16hi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_testmv8hi3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_testmv8hi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_testmv16si3 (rtx, rtx, rtx);
extern rtx gen_avx512f_testmv16si3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_testmv8si3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_testmv8si3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_testmv4si3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_testmv4si3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_testmv8di3 (rtx, rtx, rtx);
extern rtx gen_avx512f_testmv8di3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_testmv4di3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_testmv4di3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_testmv2di3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_testmv2di3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_testnmv64qi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_testnmv64qi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_testnmv16qi3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_testnmv16qi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_testnmv32qi3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_testnmv32qi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_testnmv32hi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_testnmv32hi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_testnmv16hi3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_testnmv16hi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_testnmv8hi3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_testnmv8hi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_testnmv16si3 (rtx, rtx, rtx);
extern rtx gen_avx512f_testnmv16si3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_testnmv8si3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_testnmv8si3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_testnmv4si3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_testnmv4si3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_testnmv8di3 (rtx, rtx, rtx);
extern rtx gen_avx512f_testnmv8di3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_testnmv4di3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_testnmv4di3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_testnmv2di3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_testnmv2di3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_packsswb (rtx, rtx, rtx);
extern rtx gen_avx512bw_packsswb_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_packsswb (rtx, rtx, rtx);
extern rtx gen_avx2_packsswb_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_packsswb (rtx, rtx, rtx);
extern rtx gen_sse2_packsswb_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_packssdw (rtx, rtx, rtx);
extern rtx gen_avx512bw_packssdw_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_packssdw (rtx, rtx, rtx);
extern rtx gen_avx2_packssdw_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_packssdw (rtx, rtx, rtx);
extern rtx gen_sse2_packssdw_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_packuswb (rtx, rtx, rtx);
extern rtx gen_avx512bw_packuswb_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_packuswb (rtx, rtx, rtx);
extern rtx gen_avx2_packuswb_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_packuswb (rtx, rtx, rtx);
extern rtx gen_sse2_packuswb_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_interleave_highv64qi (rtx, rtx, rtx);
extern rtx gen_avx512bw_interleave_highv64qi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_interleave_highv32qi (rtx, rtx, rtx);
extern rtx gen_avx2_interleave_highv32qi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv16qi (rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv16qi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_interleave_lowv64qi (rtx, rtx, rtx);
extern rtx gen_avx512bw_interleave_lowv64qi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_interleave_lowv32qi (rtx, rtx, rtx);
extern rtx gen_avx2_interleave_lowv32qi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv16qi (rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv16qi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_interleave_highv32hi (rtx, rtx, rtx);
extern rtx gen_avx512bw_interleave_highv32hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_interleave_highv16hi (rtx, rtx, rtx);
extern rtx gen_avx2_interleave_highv16hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv8hi (rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv8hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_interleave_lowv32hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_interleave_lowv16hi (rtx, rtx, rtx);
extern rtx gen_avx2_interleave_lowv16hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv8hi (rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv8hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_interleave_highv8si (rtx, rtx, rtx);
extern rtx gen_avx2_interleave_highv8si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_interleave_highv16si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv4si (rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv4si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_interleave_lowv8si (rtx, rtx, rtx);
extern rtx gen_avx2_interleave_lowv8si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_interleave_lowv16si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv4si (rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv4si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_pinsrb (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_pinsrw (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_pinsrd (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_pinsrq (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_vinsertf64x2_1_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_vinserti64x2_1_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vinsertf32x4_1_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vinserti32x4_1_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v16sf (rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v16si (rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v16si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v16sf (rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v16si (rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v16si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v8df (rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v8di (rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v8di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v8df (rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v8di (rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v8di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_shuf_i64x2_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_shuf_f64x2_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shuf_f64x2_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shuf_f64x2_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shuf_i64x2_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shuf_i64x2_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_shuf_i32x4_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_shuf_f32x4_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shuf_f32x4_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shuf_f32x4_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shuf_i32x4_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shuf_i32x4_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_pshufd_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_pshufd_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_pshufd_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_pshufd_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_pshufd_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_pshufd_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_pshuflwv32hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_pshuflw_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_pshuflw_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_pshuflw_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_pshuflw_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_pshufhwv32hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_pshufhw_1 (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_pshufhw_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_pshufhw_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_pshufhw_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_loadld (rtx, rtx, rtx);
extern rtx gen_vec_concatv2di (rtx, rtx, rtx);
extern rtx gen_avx512f_psadbw (rtx, rtx, rtx);
extern rtx gen_avx2_psadbw (rtx, rtx, rtx);
extern rtx gen_sse2_psadbw (rtx, rtx, rtx);
extern rtx gen_avx_movmskps256 (rtx, rtx);
extern rtx gen_sse_movmskps (rtx, rtx);
extern rtx gen_avx_movmskpd256 (rtx, rtx);
extern rtx gen_sse2_movmskpd (rtx, rtx);
extern rtx gen_avx2_pmovmskb (rtx, rtx);
extern rtx gen_sse2_pmovmskb (rtx, rtx);
extern rtx gen_sse_ldmxcsr (rtx);
extern rtx gen_sse_stmxcsr (rtx);
extern rtx gen_sse2_clflush (rtx);
extern rtx gen_sse3_mwait (rtx, rtx);
extern rtx gen_sse3_monitor_si (rtx, rtx, rtx);
extern rtx gen_sse3_monitor_di (rtx, rtx, rtx);
extern rtx gen_avx2_phaddwv16hi3 (rtx, rtx, rtx);
extern rtx gen_avx2_phaddswv16hi3 (rtx, rtx, rtx);
extern rtx gen_avx2_phsubwv16hi3 (rtx, rtx, rtx);
extern rtx gen_avx2_phsubswv16hi3 (rtx, rtx, rtx);
extern rtx gen_ssse3_phaddwv8hi3 (rtx, rtx, rtx);
extern rtx gen_ssse3_phaddswv8hi3 (rtx, rtx, rtx);
extern rtx gen_ssse3_phsubwv8hi3 (rtx, rtx, rtx);
extern rtx gen_ssse3_phsubswv8hi3 (rtx, rtx, rtx);
extern rtx gen_ssse3_phaddwv4hi3 (rtx, rtx, rtx);
extern rtx gen_ssse3_phaddswv4hi3 (rtx, rtx, rtx);
extern rtx gen_ssse3_phsubwv4hi3 (rtx, rtx, rtx);
extern rtx gen_ssse3_phsubswv4hi3 (rtx, rtx, rtx);
extern rtx gen_avx2_phadddv8si3 (rtx, rtx, rtx);
extern rtx gen_avx2_phsubdv8si3 (rtx, rtx, rtx);
extern rtx gen_ssse3_phadddv4si3 (rtx, rtx, rtx);
extern rtx gen_ssse3_phsubdv4si3 (rtx, rtx, rtx);
extern rtx gen_ssse3_phadddv2si3 (rtx, rtx, rtx);
extern rtx gen_ssse3_phsubdv2si3 (rtx, rtx, rtx);
extern rtx gen_avx2_pmaddubsw256 (rtx, rtx, rtx);
extern rtx gen_avx512bw_pmaddubsw512v8hi (rtx, rtx, rtx);
extern rtx gen_avx512bw_pmaddubsw512v8hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_pmaddubsw512v16hi (rtx, rtx, rtx);
extern rtx gen_avx512bw_pmaddubsw512v16hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_pmaddubsw512v32hi (rtx, rtx, rtx);
extern rtx gen_avx512bw_pmaddubsw512v32hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_umulhrswv32hi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_umulhrswv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ssse3_pmaddubsw128 (rtx, rtx, rtx);
extern rtx gen_ssse3_pmaddubsw (rtx, rtx, rtx);
extern rtx gen_avx512bw_pshufbv64qi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_pshufbv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_pshufbv32qi3 (rtx, rtx, rtx);
extern rtx gen_avx2_pshufbv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ssse3_pshufbv16qi3 (rtx, rtx, rtx);
extern rtx gen_ssse3_pshufbv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ssse3_pshufbv8qi3 (rtx, rtx, rtx);
extern rtx gen_avx2_psignv32qi3 (rtx, rtx, rtx);
extern rtx gen_ssse3_psignv16qi3 (rtx, rtx, rtx);
extern rtx gen_avx2_psignv16hi3 (rtx, rtx, rtx);
extern rtx gen_ssse3_psignv8hi3 (rtx, rtx, rtx);
extern rtx gen_avx2_psignv8si3 (rtx, rtx, rtx);
extern rtx gen_ssse3_psignv4si3 (rtx, rtx, rtx);
extern rtx gen_ssse3_psignv8qi3 (rtx, rtx, rtx);
extern rtx gen_ssse3_psignv4hi3 (rtx, rtx, rtx);
extern rtx gen_ssse3_psignv2si3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_palignrv64qi_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_palignrv32qi_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ssse3_palignrv16qi_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_palignrv4ti (rtx, rtx, rtx, rtx);
extern rtx gen_avx2_palignrv2ti (rtx, rtx, rtx, rtx);
extern rtx gen_ssse3_palignrti (rtx, rtx, rtx, rtx);
extern rtx gen_ssse3_palignrdi (rtx, rtx, rtx, rtx);
extern rtx gen_absv16si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_absv8si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_absv4si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_absv8di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_absv4di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_absv2di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_absv64qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_absv16qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_absv32qi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_absv32hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_absv16hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_absv8hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_absv8qi2 (rtx, rtx);
extern rtx gen_absv4hi2 (rtx, rtx);
extern rtx gen_absv2si2 (rtx, rtx);
extern rtx gen_sse4a_movntsf (rtx, rtx);
extern rtx gen_sse4a_movntdf (rtx, rtx);
extern rtx gen_sse4a_vmmovntv4sf (rtx, rtx);
extern rtx gen_sse4a_vmmovntv2df (rtx, rtx);
extern rtx gen_sse4a_extrqi (rtx, rtx, rtx, rtx);
extern rtx gen_sse4a_extrq (rtx, rtx, rtx);
extern rtx gen_sse4a_insertqi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse4a_insertq (rtx, rtx, rtx);
extern rtx gen_avx_blendps256 (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_blendps (rtx, rtx, rtx, rtx);
extern rtx gen_avx_blendpd256 (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_blendpd (rtx, rtx, rtx, rtx);
extern rtx gen_avx_blendvps256 (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_blendvps (rtx, rtx, rtx, rtx);
extern rtx gen_avx_blendvpd256 (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_blendvpd (rtx, rtx, rtx, rtx);
extern rtx gen_avx_dpps256 (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_dpps (rtx, rtx, rtx, rtx);
extern rtx gen_avx_dppd256 (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_dppd (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_movntdqa (rtx, rtx);
extern rtx gen_avx2_movntdqa (rtx, rtx);
extern rtx gen_sse4_1_movntdqa (rtx, rtx);
extern rtx gen_avx2_mpsadbw (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_mpsadbw (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_packusdw (rtx, rtx, rtx);
extern rtx gen_avx512bw_packusdw_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_packusdw (rtx, rtx, rtx);
extern rtx gen_avx2_packusdw_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_packusdw (rtx, rtx, rtx);
extern rtx gen_sse4_1_packusdw_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_pblendvb (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_pblendvb (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_pblendw (rtx, rtx, rtx, rtx);
extern rtx gen_avx2_pblenddv8si (rtx, rtx, rtx, rtx);
extern rtx gen_avx2_pblenddv4si (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_phminposuw (rtx, rtx);
extern rtx gen_avx2_sign_extendv16qiv16hi2 (rtx, rtx);
extern rtx gen_avx2_sign_extendv16qiv16hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx2_zero_extendv16qiv16hi2 (rtx, rtx);
extern rtx gen_avx2_zero_extendv16qiv16hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_sign_extendv32qiv32hi2 (rtx, rtx);
extern rtx gen_avx512bw_sign_extendv32qiv32hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_zero_extendv32qiv32hi2 (rtx, rtx);
extern rtx gen_avx512bw_zero_extendv32qiv32hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_sign_extendv8qiv8hi2 (rtx, rtx);
extern rtx gen_sse4_1_sign_extendv8qiv8hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_zero_extendv8qiv8hi2 (rtx, rtx);
extern rtx gen_sse4_1_zero_extendv8qiv8hi2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sign_extendv16qiv16si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_zero_extendv16qiv16si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx2_sign_extendv8qiv8si2 (rtx, rtx);
extern rtx gen_avx2_sign_extendv8qiv8si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx2_zero_extendv8qiv8si2 (rtx, rtx);
extern rtx gen_avx2_zero_extendv8qiv8si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_sign_extendv4qiv4si2 (rtx, rtx);
extern rtx gen_sse4_1_sign_extendv4qiv4si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_zero_extendv4qiv4si2 (rtx, rtx);
extern rtx gen_sse4_1_zero_extendv4qiv4si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sign_extendv16hiv16si2 (rtx, rtx);
extern rtx gen_avx512f_sign_extendv16hiv16si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_zero_extendv16hiv16si2 (rtx, rtx);
extern rtx gen_avx512f_zero_extendv16hiv16si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx2_sign_extendv8hiv8si2 (rtx, rtx);
extern rtx gen_avx2_sign_extendv8hiv8si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx2_zero_extendv8hiv8si2 (rtx, rtx);
extern rtx gen_avx2_zero_extendv8hiv8si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_sign_extendv4hiv4si2 (rtx, rtx);
extern rtx gen_sse4_1_sign_extendv4hiv4si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_zero_extendv4hiv4si2 (rtx, rtx);
extern rtx gen_sse4_1_zero_extendv4hiv4si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sign_extendv8qiv8di2 (rtx, rtx);
extern rtx gen_avx512f_sign_extendv8qiv8di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_zero_extendv8qiv8di2 (rtx, rtx);
extern rtx gen_avx512f_zero_extendv8qiv8di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx2_sign_extendv4qiv4di2 (rtx, rtx);
extern rtx gen_avx2_sign_extendv4qiv4di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx2_zero_extendv4qiv4di2 (rtx, rtx);
extern rtx gen_avx2_zero_extendv4qiv4di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_sign_extendv2qiv2di2 (rtx, rtx);
extern rtx gen_sse4_1_sign_extendv2qiv2di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_zero_extendv2qiv2di2 (rtx, rtx);
extern rtx gen_sse4_1_zero_extendv2qiv2di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sign_extendv8hiv8di2 (rtx, rtx);
extern rtx gen_avx512f_sign_extendv8hiv8di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_zero_extendv8hiv8di2 (rtx, rtx);
extern rtx gen_avx512f_zero_extendv8hiv8di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx2_sign_extendv4hiv4di2 (rtx, rtx);
extern rtx gen_avx2_sign_extendv4hiv4di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx2_zero_extendv4hiv4di2 (rtx, rtx);
extern rtx gen_avx2_zero_extendv4hiv4di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_sign_extendv2hiv2di2 (rtx, rtx);
extern rtx gen_sse4_1_sign_extendv2hiv2di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_zero_extendv2hiv2di2 (rtx, rtx);
extern rtx gen_sse4_1_zero_extendv2hiv2di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sign_extendv8siv8di2 (rtx, rtx);
extern rtx gen_avx512f_sign_extendv8siv8di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_zero_extendv8siv8di2 (rtx, rtx);
extern rtx gen_avx512f_zero_extendv8siv8di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx2_sign_extendv4siv4di2 (rtx, rtx);
extern rtx gen_avx2_sign_extendv4siv4di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx2_zero_extendv4siv4di2 (rtx, rtx);
extern rtx gen_avx2_zero_extendv4siv4di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_sign_extendv2siv2di2 (rtx, rtx);
extern rtx gen_sse4_1_sign_extendv2siv2di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_zero_extendv2siv2di2 (rtx, rtx);
extern rtx gen_sse4_1_zero_extendv2siv2di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx_vtestps256 (rtx, rtx);
extern rtx gen_avx_vtestps (rtx, rtx);
extern rtx gen_avx_vtestpd256 (rtx, rtx);
extern rtx gen_avx_vtestpd (rtx, rtx);
extern rtx gen_sse4_1_ptestv16qi (rtx, rtx);
extern rtx gen_sse4_1_ptestv8hi (rtx, rtx);
extern rtx gen_sse4_1_ptestv4si (rtx, rtx);
extern rtx gen_sse4_1_ptestv2di (rtx, rtx);
extern rtx gen_sse4_1_ptestv4sf (rtx, rtx);
extern rtx gen_sse4_1_ptestv2df (rtx, rtx);
extern rtx gen_avx_ptestv32qi (rtx, rtx);
extern rtx gen_avx_ptestv16hi (rtx, rtx);
extern rtx gen_avx_ptestv8si (rtx, rtx);
extern rtx gen_avx_ptestv4di (rtx, rtx);
extern rtx gen_avx_ptestv8sf (rtx, rtx);
extern rtx gen_avx_ptestv4df (rtx, rtx);
extern rtx gen_avx_roundps256 (rtx, rtx, rtx);
extern rtx gen_sse4_1_roundps (rtx, rtx, rtx);
extern rtx gen_avx_roundpd256 (rtx, rtx, rtx);
extern rtx gen_sse4_1_roundpd (rtx, rtx, rtx);
extern rtx gen_sse4_1_roundss (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_roundsd (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_2_pcmpestr (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse4_2_pcmpestri (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse4_2_pcmpestrm (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse4_2_pcmpestr_cconly (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse4_2_pcmpistr (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse4_2_pcmpistri (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_2_pcmpistrm (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_2_pcmpistr_cconly (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512er_exp2v16sf (rtx, rtx);
extern rtx gen_avx512er_exp2v16sf_round (rtx, rtx, rtx);
extern rtx gen_avx512er_exp2v16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512er_exp2v16sf_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512er_exp2v8df (rtx, rtx);
extern rtx gen_avx512er_exp2v8df_round (rtx, rtx, rtx);
extern rtx gen_avx512er_exp2v8df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512er_exp2v8df_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512er_rcp28v16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512er_rcp28v16sf_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512er_rcp28v8df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512er_rcp28v8df_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512er_vmrcp28v4sf (rtx, rtx, rtx);
extern rtx gen_avx512er_vmrcp28v4sf_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512er_vmrcp28v2df (rtx, rtx, rtx);
extern rtx gen_avx512er_vmrcp28v2df_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512er_rsqrt28v16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512er_rsqrt28v16sf_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512er_rsqrt28v8df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512er_rsqrt28v8df_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512er_vmrsqrt28v4sf (rtx, rtx, rtx);
extern rtx gen_avx512er_vmrsqrt28v4sf_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512er_vmrsqrt28v2df (rtx, rtx, rtx);
extern rtx gen_avx512er_vmrsqrt28v2df_round (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pmacsww (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pmacssww (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pmacsdd (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pmacssdd (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pmacsdql (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pmacssdql (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pmacsdqh (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pmacssdqh (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pmacswd (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pmacsswd (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pmadcswd (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pmadcsswd (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcmov_v32qi256 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcmov_v16qi (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcmov_v16hi256 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcmov_v8hi (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcmov_v16si512 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcmov_v8si256 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcmov_v4si (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcmov_v8di512 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcmov_v4di256 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcmov_v2di (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcmov_v16sf512 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcmov_v8sf256 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcmov_v4sf (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcmov_v8df512 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcmov_v4df256 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcmov_v2df (rtx, rtx, rtx, rtx);
extern rtx gen_xop_phaddbw (rtx, rtx);
extern rtx gen_xop_phaddubw (rtx, rtx);
extern rtx gen_xop_phaddbd (rtx, rtx);
extern rtx gen_xop_phaddubd (rtx, rtx);
extern rtx gen_xop_phaddbq (rtx, rtx);
extern rtx gen_xop_phaddubq (rtx, rtx);
extern rtx gen_xop_phaddwd (rtx, rtx);
extern rtx gen_xop_phadduwd (rtx, rtx);
extern rtx gen_xop_phaddwq (rtx, rtx);
extern rtx gen_xop_phadduwq (rtx, rtx);
extern rtx gen_xop_phadddq (rtx, rtx);
extern rtx gen_xop_phaddudq (rtx, rtx);
extern rtx gen_xop_phsubbw (rtx, rtx);
extern rtx gen_xop_phsubwd (rtx, rtx);
extern rtx gen_xop_phsubdq (rtx, rtx);
extern rtx gen_xop_pperm (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pperm_pack_v2di_v4si (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pperm_pack_v4si_v8hi (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pperm_pack_v8hi_v16qi (rtx, rtx, rtx, rtx);
extern rtx gen_xop_rotlv16qi3 (rtx, rtx, rtx);
extern rtx gen_xop_rotlv8hi3 (rtx, rtx, rtx);
extern rtx gen_xop_rotlv4si3 (rtx, rtx, rtx);
extern rtx gen_xop_rotlv2di3 (rtx, rtx, rtx);
extern rtx gen_xop_rotrv16qi3 (rtx, rtx, rtx);
extern rtx gen_xop_rotrv8hi3 (rtx, rtx, rtx);
extern rtx gen_xop_rotrv4si3 (rtx, rtx, rtx);
extern rtx gen_xop_rotrv2di3 (rtx, rtx, rtx);
extern rtx gen_xop_vrotlv16qi3 (rtx, rtx, rtx);
extern rtx gen_xop_vrotlv8hi3 (rtx, rtx, rtx);
extern rtx gen_xop_vrotlv4si3 (rtx, rtx, rtx);
extern rtx gen_xop_vrotlv2di3 (rtx, rtx, rtx);
extern rtx gen_xop_shav16qi3 (rtx, rtx, rtx);
extern rtx gen_xop_shav8hi3 (rtx, rtx, rtx);
extern rtx gen_xop_shav4si3 (rtx, rtx, rtx);
extern rtx gen_xop_shav2di3 (rtx, rtx, rtx);
extern rtx gen_xop_shlv16qi3 (rtx, rtx, rtx);
extern rtx gen_xop_shlv8hi3 (rtx, rtx, rtx);
extern rtx gen_xop_shlv4si3 (rtx, rtx, rtx);
extern rtx gen_xop_shlv2di3 (rtx, rtx, rtx);
extern rtx gen_xop_frczsf2 (rtx, rtx);
extern rtx gen_xop_frczdf2 (rtx, rtx);
extern rtx gen_xop_frczv4sf2 (rtx, rtx);
extern rtx gen_xop_frczv2df2 (rtx, rtx);
extern rtx gen_xop_frczv8sf2 (rtx, rtx);
extern rtx gen_xop_frczv4df2 (rtx, rtx);
extern rtx gen_xop_maskcmpv16qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_maskcmpv8hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_maskcmpv4si3 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_maskcmpv2di3 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_maskcmp_unsv16qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_maskcmp_unsv8hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_maskcmp_unsv4si3 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_maskcmp_unsv2di3 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_maskcmp_uns2v16qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_maskcmp_uns2v8hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_maskcmp_uns2v4si3 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_maskcmp_uns2v2di3 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcom_tfv16qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcom_tfv8hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcom_tfv4si3 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_pcom_tfv2di3 (rtx, rtx, rtx, rtx);
extern rtx gen_xop_vpermil2v8sf3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xop_vpermil2v4sf3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xop_vpermil2v4df3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xop_vpermil2v2df3 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_aesenc (rtx, rtx, rtx);
extern rtx gen_aesenclast (rtx, rtx, rtx);
extern rtx gen_aesdec (rtx, rtx, rtx);
extern rtx gen_aesdeclast (rtx, rtx, rtx);
extern rtx gen_aesimc (rtx, rtx);
extern rtx gen_aeskeygenassist (rtx, rtx, rtx);
extern rtx gen_pclmulqdq (rtx, rtx, rtx, rtx);
extern rtx gen_avx_vzeroupper (void);
extern rtx gen_avx2_pbroadcastv16si (rtx, rtx);
extern rtx gen_avx2_pbroadcastv8di (rtx, rtx);
extern rtx gen_avx2_pbroadcastv64qi (rtx, rtx);
extern rtx gen_avx2_pbroadcastv32qi (rtx, rtx);
extern rtx gen_avx2_pbroadcastv16qi (rtx, rtx);
extern rtx gen_avx2_pbroadcastv32hi (rtx, rtx);
extern rtx gen_avx2_pbroadcastv16hi (rtx, rtx);
extern rtx gen_avx2_pbroadcastv8hi (rtx, rtx);
extern rtx gen_avx2_pbroadcastv8si (rtx, rtx);
extern rtx gen_avx2_pbroadcastv4si (rtx, rtx);
extern rtx gen_avx2_pbroadcastv4di (rtx, rtx);
extern rtx gen_avx2_pbroadcastv2di (rtx, rtx);
extern rtx gen_avx2_pbroadcastv32qi_1 (rtx, rtx);
extern rtx gen_avx2_pbroadcastv16hi_1 (rtx, rtx);
extern rtx gen_avx2_pbroadcastv8si_1 (rtx, rtx);
extern rtx gen_avx2_pbroadcastv4di_1 (rtx, rtx);
extern rtx gen_avx2_permvarv8si (rtx, rtx, rtx);
extern rtx gen_avx2_permvarv8si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_permvarv8sf (rtx, rtx, rtx);
extern rtx gen_avx2_permvarv8sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_permvarv16si (rtx, rtx, rtx);
extern rtx gen_avx512f_permvarv16si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_permvarv16sf (rtx, rtx, rtx);
extern rtx gen_avx512f_permvarv16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_permvarv8di (rtx, rtx, rtx);
extern rtx gen_avx512f_permvarv8di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_permvarv8df (rtx, rtx, rtx);
extern rtx gen_avx512f_permvarv8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_permvarv4di (rtx, rtx, rtx);
extern rtx gen_avx2_permvarv4di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_permvarv4df (rtx, rtx, rtx);
extern rtx gen_avx2_permvarv4df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_permvarv64qi (rtx, rtx, rtx);
extern rtx gen_avx512bw_permvarv64qi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_permvarv16qi (rtx, rtx, rtx);
extern rtx gen_avx512vl_permvarv16qi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_permvarv32qi (rtx, rtx, rtx);
extern rtx gen_avx512vl_permvarv32qi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_permvarv8hi (rtx, rtx, rtx);
extern rtx gen_avx512vl_permvarv8hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_permvarv16hi (rtx, rtx, rtx);
extern rtx gen_avx512vl_permvarv16hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_permvarv32hi (rtx, rtx, rtx);
extern rtx gen_avx512bw_permvarv32hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_permv4di_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_permv4di_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_permv4df_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_permv4df_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_permv8di_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_permv8di_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_permv8df_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_permv8df_1_mask (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_permv2ti (rtx, rtx, rtx, rtx);
extern rtx gen_avx2_vec_dupv4df (rtx, rtx);
extern rtx gen_avx512f_vec_dupv16si_1 (rtx, rtx);
extern rtx gen_avx512f_vec_dupv8di_1 (rtx, rtx);
extern rtx gen_avx512bw_vec_dupv32hi_1 (rtx, rtx);
extern rtx gen_avx512bw_vec_dupv64qi_1 (rtx, rtx);
extern rtx gen_avx512f_vec_dupv16si (rtx, rtx);
extern rtx gen_avx512f_vec_dupv16si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dupv8si (rtx, rtx);
extern rtx gen_avx512vl_vec_dupv8si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dupv4si (rtx, rtx);
extern rtx gen_avx512vl_vec_dupv4si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vec_dupv8di (rtx, rtx);
extern rtx gen_avx512f_vec_dupv8di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dupv4di (rtx, rtx);
extern rtx gen_avx512vl_vec_dupv4di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dupv2di (rtx, rtx);
extern rtx gen_avx512vl_vec_dupv2di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vec_dupv16sf (rtx, rtx);
extern rtx gen_avx512f_vec_dupv16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dupv8sf (rtx, rtx);
extern rtx gen_avx512vl_vec_dupv8sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dupv4sf (rtx, rtx);
extern rtx gen_avx512vl_vec_dupv4sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vec_dupv8df (rtx, rtx);
extern rtx gen_avx512f_vec_dupv8df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dupv4df (rtx, rtx);
extern rtx gen_avx512vl_vec_dupv4df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dupv2df (rtx, rtx);
extern rtx gen_avx512vl_vec_dupv2df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vec_dupv64qi (rtx, rtx);
extern rtx gen_avx512bw_vec_dupv64qi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dupv16qi (rtx, rtx);
extern rtx gen_avx512vl_vec_dupv16qi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dupv32qi (rtx, rtx);
extern rtx gen_avx512vl_vec_dupv32qi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vec_dupv32hi (rtx, rtx);
extern rtx gen_avx512bw_vec_dupv32hi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dupv16hi (rtx, rtx);
extern rtx gen_avx512vl_vec_dupv16hi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dupv8hi (rtx, rtx);
extern rtx gen_avx512vl_vec_dupv8hi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_broadcastv16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_broadcastv16si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_broadcastv8df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_broadcastv8di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vec_dup_gprv64qi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dup_gprv16qi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dup_gprv32qi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vec_dup_gprv32hi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dup_gprv16hi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dup_gprv8hi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vec_dup_gprv16si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dup_gprv8si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dup_gprv4si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vec_dup_gprv8di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dup_gprv4di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dup_gprv2di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vec_dup_gprv16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dup_gprv8sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dup_gprv4sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vec_dup_gprv8df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dup_gprv4df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vec_dup_gprv2df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vec_dupv4sf (rtx, rtx);
extern rtx gen_avx2_vbroadcasti128_v32qi (rtx, rtx);
extern rtx gen_avx2_vbroadcasti128_v16hi (rtx, rtx);
extern rtx gen_avx2_vbroadcasti128_v8si (rtx, rtx);
extern rtx gen_avx2_vbroadcasti128_v4di (rtx, rtx);
extern rtx gen_vec_dupv8si (rtx, rtx);
extern rtx gen_vec_dupv8sf (rtx, rtx);
extern rtx gen_vec_dupv4di (rtx, rtx);
extern rtx gen_vec_dupv4df (rtx, rtx);
extern rtx gen_avx_vbroadcastf128_v32qi (rtx, rtx);
extern rtx gen_avx_vbroadcastf128_v16hi (rtx, rtx);
extern rtx gen_avx_vbroadcastf128_v8si (rtx, rtx);
extern rtx gen_avx_vbroadcastf128_v4di (rtx, rtx);
extern rtx gen_avx_vbroadcastf128_v8sf (rtx, rtx);
extern rtx gen_avx_vbroadcastf128_v4df (rtx, rtx);
extern rtx gen_avx512dq_broadcastv16si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_broadcastv8si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_broadcastv4si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_broadcastv16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_broadcastv8sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_broadcastv8si_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_broadcastv8sf_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_broadcastv16sf_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_broadcastv16si_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_broadcastv8di_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_broadcastv8df_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_broadcastv4di_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_broadcastv4df_mask_1 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512cd_maskb_vec_dupv8di (rtx, rtx);
extern rtx gen_avx512cd_maskb_vec_dupv4di (rtx, rtx);
extern rtx gen_avx512cd_maskb_vec_dupv2di (rtx, rtx);
extern rtx gen_avx512cd_maskw_vec_dupv16si (rtx, rtx);
extern rtx gen_avx512cd_maskw_vec_dupv8si (rtx, rtx);
extern rtx gen_avx512cd_maskw_vec_dupv4si (rtx, rtx);
extern rtx gen_avx512f_vpermilvarv16sf3 (rtx, rtx, rtx);
extern rtx gen_avx512f_vpermilvarv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_vpermilvarv8sf3 (rtx, rtx, rtx);
extern rtx gen_avx_vpermilvarv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_vpermilvarv4sf3 (rtx, rtx, rtx);
extern rtx gen_avx_vpermilvarv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermilvarv8df3 (rtx, rtx, rtx);
extern rtx gen_avx512f_vpermilvarv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_vpermilvarv4df3 (rtx, rtx, rtx);
extern rtx gen_avx_vpermilvarv4df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_vpermilvarv2df3 (rtx, rtx, rtx);
extern rtx gen_avx_vpermilvarv2df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermi2varv16si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermi2varv16si3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermi2varv16sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermi2varv16sf3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermi2varv8di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermi2varv8di3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermi2varv8df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermi2varv8df3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv8si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv8si3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv8sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv8sf3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv4di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv4di3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv4df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv4df3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv4si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv4si3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv4sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv4sf3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv2di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv2di3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv2df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv2df3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vpermi2varv64qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vpermi2varv64qi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv16qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv16qi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv32qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv32qi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv8hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv8hi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv16hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv16hi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vpermi2varv32hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vpermi2varv32hi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermi2varv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermi2varv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermi2varv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermi2varv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv4df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv2df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vpermi2varv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vpermi2varv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermt2varv16si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermt2varv16si3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermt2varv16sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermt2varv16sf3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermt2varv8di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermt2varv8di3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermt2varv8df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermt2varv8df3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv8si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv8si3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv8sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv8sf3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv4di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv4di3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv4df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv4df3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv4si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv4si3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv4sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv4sf3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv2di3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv2di3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv2df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv2df3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vpermt2varv64qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vpermt2varv64qi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv16qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv16qi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv32qi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv32qi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv8hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv8hi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv16hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv16hi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vpermt2varv32hi3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vpermt2varv32hi3_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermt2varv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermt2varv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermt2varv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermt2varv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv4df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv2df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vpermt2varv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vpermt2varv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v4di (rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v4di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v4df (rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v4df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v4di (rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v4di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v4df (rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v4df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v8si (rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v8si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v8sf (rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v8sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v8si (rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v8si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v8sf (rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v8sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v16hi (rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v16hi (rtx, rtx, rtx);
extern rtx gen_vec_set_lo_v32qi (rtx, rtx, rtx);
extern rtx gen_vec_set_hi_v32qi (rtx, rtx, rtx);
extern rtx gen_avx_maskloadps (rtx, rtx, rtx);
extern rtx gen_avx_maskloadpd (rtx, rtx, rtx);
extern rtx gen_avx_maskloadps256 (rtx, rtx, rtx);
extern rtx gen_avx_maskloadpd256 (rtx, rtx, rtx);
extern rtx gen_avx2_maskloadd (rtx, rtx, rtx);
extern rtx gen_avx2_maskloadq (rtx, rtx, rtx);
extern rtx gen_avx2_maskloadd256 (rtx, rtx, rtx);
extern rtx gen_avx2_maskloadq256 (rtx, rtx, rtx);
extern rtx gen_avx_maskstoreps (rtx, rtx, rtx);
extern rtx gen_avx_maskstorepd (rtx, rtx, rtx);
extern rtx gen_avx_maskstoreps256 (rtx, rtx, rtx);
extern rtx gen_avx_maskstorepd256 (rtx, rtx, rtx);
extern rtx gen_avx2_maskstored (rtx, rtx, rtx);
extern rtx gen_avx2_maskstoreq (rtx, rtx, rtx);
extern rtx gen_avx2_maskstored256 (rtx, rtx, rtx);
extern rtx gen_avx2_maskstoreq256 (rtx, rtx, rtx);
extern rtx gen_avx_si256_si (rtx, rtx);
extern rtx gen_avx_ps256_ps (rtx, rtx);
extern rtx gen_avx_pd256_pd (rtx, rtx);
extern rtx gen_avx2_ashrvv4si (rtx, rtx, rtx);
extern rtx gen_avx2_ashrvv4si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_ashrvv8si (rtx, rtx, rtx);
extern rtx gen_avx2_ashrvv8si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ashrvv16si (rtx, rtx, rtx);
extern rtx gen_avx512f_ashrvv16si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_ashrvv2di (rtx, rtx, rtx);
extern rtx gen_avx2_ashrvv2di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_ashrvv4di (rtx, rtx, rtx);
extern rtx gen_avx2_ashrvv4di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ashrvv8di (rtx, rtx, rtx);
extern rtx gen_avx512f_ashrvv8di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ashrvv8hi (rtx, rtx, rtx);
extern rtx gen_avx512vl_ashrvv8hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ashrvv16hi (rtx, rtx, rtx);
extern rtx gen_avx512vl_ashrvv16hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_ashrvv32hi (rtx, rtx, rtx);
extern rtx gen_avx512bw_ashrvv32hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ashlvv16si (rtx, rtx, rtx);
extern rtx gen_avx512f_ashlvv16si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_lshrvv16si (rtx, rtx, rtx);
extern rtx gen_avx512f_lshrvv16si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_ashlvv8si (rtx, rtx, rtx);
extern rtx gen_avx2_ashlvv8si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_lshrvv8si (rtx, rtx, rtx);
extern rtx gen_avx2_lshrvv8si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_ashlvv4si (rtx, rtx, rtx);
extern rtx gen_avx2_ashlvv4si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_lshrvv4si (rtx, rtx, rtx);
extern rtx gen_avx2_lshrvv4si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_ashlvv8di (rtx, rtx, rtx);
extern rtx gen_avx512f_ashlvv8di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_lshrvv8di (rtx, rtx, rtx);
extern rtx gen_avx512f_lshrvv8di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_ashlvv4di (rtx, rtx, rtx);
extern rtx gen_avx2_ashlvv4di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_lshrvv4di (rtx, rtx, rtx);
extern rtx gen_avx2_lshrvv4di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_ashlvv2di (rtx, rtx, rtx);
extern rtx gen_avx2_ashlvv2di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_lshrvv2di (rtx, rtx, rtx);
extern rtx gen_avx2_lshrvv2di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ashlvv8hi (rtx, rtx, rtx);
extern rtx gen_avx512vl_ashlvv8hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_lshrvv8hi (rtx, rtx, rtx);
extern rtx gen_avx512vl_lshrvv8hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_ashlvv16hi (rtx, rtx, rtx);
extern rtx gen_avx512vl_ashlvv16hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_lshrvv16hi (rtx, rtx, rtx);
extern rtx gen_avx512vl_lshrvv16hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_ashlvv32hi (rtx, rtx, rtx);
extern rtx gen_avx512bw_ashlvv32hi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_lshrvv32hi (rtx, rtx, rtx);
extern rtx gen_avx512bw_lshrvv32hi_mask (rtx, rtx, rtx, rtx, rtx);
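/* vec_concat expanders: assemble a full-width vector from two
   half-width pieces (e.g. two 16-byte halves into a V32QI).  */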
extern rtx gen_avx_vec_concatv32qi (rtx, rtx, rtx);
extern rtx gen_avx_vec_concatv16hi (rtx, rtx, rtx);
extern rtx gen_avx_vec_concatv8si (rtx, rtx, rtx);
extern rtx gen_avx_vec_concatv4di (rtx, rtx, rtx);
extern rtx gen_avx_vec_concatv8sf (rtx, rtx, rtx);
extern rtx gen_avx_vec_concatv4df (rtx, rtx, rtx);
extern rtx gen_avx_vec_concatv64qi (rtx, rtx, rtx);
extern rtx gen_avx_vec_concatv32hi (rtx, rtx, rtx);
extern rtx gen_avx_vec_concatv16si (rtx, rtx, rtx);
extern rtx gen_avx_vec_concatv8di (rtx, rtx, rtx);
extern rtx gen_avx_vec_concatv16sf (rtx, rtx, rtx);
extern rtx gen_avx_vec_concatv8df (rtx, rtx, rtx);
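/* Half-precision conversions (F16C/AVX-512F vcvtph2ps and
   vcvtps2ph).  */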
extern rtx gen_vcvtph2ps (rtx, rtx);
extern rtx gen_vcvtph2ps_mask (rtx, rtx, rtx, rtx);
extern rtx gen_vcvtph2ps256 (rtx, rtx);
extern rtx gen_vcvtph2ps256_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vcvtph2ps512_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vcvtph2ps512_mask_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcvtps2ph256 (rtx, rtx, rtx);
extern rtx gen_vcvtps2ph256_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vcvtps2ph512_mask (rtx, rtx, rtx, rtx, rtx);
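/* AVX-512 compress/expand: compress packs the mask-selected elements
   into consecutive low positions (the compressstore forms write the
   packed result straight to memory); expand scatters them back out to
   their lane positions.  */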
extern rtx gen_avx512f_compressv16si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_compressv16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_compressv8di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_compressv8df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_compressv8si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_compressv8sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_compressv4di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_compressv4df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_compressv4si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_compressv4sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_compressv2di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_compressv2df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_compressstorev16si_mask (rtx, rtx, rtx);
extern rtx gen_avx512f_compressstorev16sf_mask (rtx, rtx, rtx);
extern rtx gen_avx512f_compressstorev8di_mask (rtx, rtx, rtx);
extern rtx gen_avx512f_compressstorev8df_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_compressstorev8si_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_compressstorev8sf_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_compressstorev4di_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_compressstorev4df_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_compressstorev4si_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_compressstorev4sf_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_compressstorev2di_mask (rtx, rtx, rtx);
extern rtx gen_avx512vl_compressstorev2df_mask (rtx, rtx, rtx);
extern rtx gen_avx512f_expandv16si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_expandv16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_expandv8di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_expandv8df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_expandv8si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_expandv8sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_expandv4di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_expandv4df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_expandv4si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_expandv4sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_expandv2di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_expandv2df_mask (rtx, rtx, rtx, rtx);
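/* AVX-512DQ vrangeps/vrangepd.  Note the convention visible below: a
   pattern that can never match in this configuration gets no extern
   declaration but a static inline dummy that ignores its parameters
   (ARG_UNUSED) and returns 0, i.e. "no insn", so guarded callers still
   compile.  Embedded rounding exists only for 512-bit and scalar
   operands, which is apparently why the 128/256-bit _round variants
   are such stubs.  */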
extern rtx gen_avx512dq_rangepv16sf (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_rangepv16sf_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_rangepv16sf_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_rangepv16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_rangepv8sf (rtx, rtx, rtx, rtx);
static inline rtx gen_avx512dq_rangepv8sf_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512dq_rangepv8sf_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
extern rtx gen_avx512dq_rangepv8sf_mask (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512dq_rangepv8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512dq_rangepv8sf_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
extern rtx gen_avx512dq_rangepv4sf (rtx, rtx, rtx, rtx);
static inline rtx gen_avx512dq_rangepv4sf_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512dq_rangepv4sf_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
extern rtx gen_avx512dq_rangepv4sf_mask (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512dq_rangepv4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512dq_rangepv4sf_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
extern rtx gen_avx512dq_rangepv8df (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_rangepv8df_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_rangepv8df_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_rangepv8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_rangepv4df (rtx, rtx, rtx, rtx);
static inline rtx gen_avx512dq_rangepv4df_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512dq_rangepv4df_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
extern rtx gen_avx512dq_rangepv4df_mask (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512dq_rangepv4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512dq_rangepv4df_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
extern rtx gen_avx512dq_rangepv2df (rtx, rtx, rtx, rtx);
static inline rtx gen_avx512dq_rangepv2df_round (rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512dq_rangepv2df_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
{
  return 0;
}
extern rtx gen_avx512dq_rangepv2df_mask (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_avx512dq_rangepv2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_avx512dq_rangepv2df_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
{
  return 0;
}
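/* Scalar vrange (ranges*), AVX-512DQ vfpclass and AVX-512F vgetmant
   expanders, plus the AVX-512BW dbpsadbw expanders.  */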
extern rtx gen_avx512dq_rangesv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_rangesv4sf_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_rangesv2df (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_rangesv2df_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_fpclassv16sf (rtx, rtx, rtx);
extern rtx gen_avx512dq_fpclassv16sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_fpclassv8sf (rtx, rtx, rtx);
extern rtx gen_avx512dq_fpclassv8sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_fpclassv4sf (rtx, rtx, rtx);
extern rtx gen_avx512dq_fpclassv4sf_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_fpclassv8df (rtx, rtx, rtx);
extern rtx gen_avx512dq_fpclassv8df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_fpclassv4df (rtx, rtx, rtx);
extern rtx gen_avx512dq_fpclassv4df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_fpclassv2df (rtx, rtx, rtx);
extern rtx gen_avx512dq_fpclassv2df_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_vmfpclassv4sf (rtx, rtx, rtx);
extern rtx gen_avx512dq_vmfpclassv2df (rtx, rtx, rtx);
extern rtx gen_avx512f_getmantv16sf (rtx, rtx, rtx);
extern rtx gen_avx512f_getmantv16sf_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_getmantv16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_getmantv16sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getmantv8sf (rtx, rtx, rtx);
extern rtx gen_avx512vl_getmantv8sf_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getmantv8sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getmantv8sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getmantv4sf (rtx, rtx, rtx);
extern rtx gen_avx512vl_getmantv4sf_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getmantv4sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getmantv4sf_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_getmantv8df (rtx, rtx, rtx);
extern rtx gen_avx512f_getmantv8df_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_getmantv8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_getmantv8df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getmantv4df (rtx, rtx, rtx);
extern rtx gen_avx512vl_getmantv4df_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getmantv4df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getmantv4df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getmantv2df (rtx, rtx, rtx);
extern rtx gen_avx512vl_getmantv2df_round (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getmantv2df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_getmantv2df_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vgetmantv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vgetmantv4sf_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vgetmantv2df (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vgetmantv2df_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_dbpsadbwv8hi_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_dbpsadbwv16hi_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_dbpsadbwv32hi_mask (rtx, rtx, rtx, rtx, rtx, rtx);
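/* AVX-512CD expanders: per-element leading-zero count (vplzcntd/q)
   and conflict detection (vpconflictd/q).  */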
extern rtx gen_clzv16si2 (rtx, rtx);
extern rtx gen_clzv16si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_clzv8si2 (rtx, rtx);
extern rtx gen_clzv8si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_clzv4si2 (rtx, rtx);
extern rtx gen_clzv4si2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_clzv8di2 (rtx, rtx);
extern rtx gen_clzv8di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_clzv4di2 (rtx, rtx);
extern rtx gen_clzv4di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_clzv2di2 (rtx, rtx);
extern rtx gen_clzv2di2_mask (rtx, rtx, rtx, rtx);
extern rtx gen_conflictv16si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_conflictv8si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_conflictv4si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_conflictv8di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_conflictv4di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_conflictv2di_mask (rtx, rtx, rtx, rtx);
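/* Intel SHA extension expanders, one per instruction.  */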
extern rtx gen_sha1msg1 (rtx, rtx, rtx);
extern rtx gen_sha1msg2 (rtx, rtx, rtx);
extern rtx gen_sha1nexte (rtx, rtx, rtx);
extern rtx gen_sha1rnds4 (rtx, rtx, rtx, rtx);
extern rtx gen_sha256msg1 (rtx, rtx, rtx);
extern rtx gen_sha256msg2 (rtx, rtx, rtx);
extern rtx gen_sha256rnds2 (rtx, rtx, rtx, rtx);
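/* Casts into 512-bit registers, the AVX-512IFMA 52-bit multiply-add
   expanders (the vpamdd52 spelling, for vpmadd52luq/vpmadd52huq, is
   apparently inherited from the machine description) and the
   AVX-512VBMI vpmultishiftqb expanders.  */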
extern rtx gen_avx512f_si512_si (rtx, rtx);
extern rtx gen_avx512f_ps512_ps (rtx, rtx);
extern rtx gen_avx512f_pd512_pd (rtx, rtx);
extern rtx gen_avx512f_si512_256si (rtx, rtx);
extern rtx gen_avx512f_ps512_256ps (rtx, rtx);
extern rtx gen_avx512f_pd512_256pd (rtx, rtx);
extern rtx gen_vpamdd52luqv8di (rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52luqv8di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52huqv8di (rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52huqv8di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52luqv4di (rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52luqv4di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52huqv4di (rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52huqv4di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52luqv2di (rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52luqv2di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52huqv2di (rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52huqv2di_maskz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52luqv8di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52huqv8di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52luqv4di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52huqv4di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52luqv2di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52huqv2di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpmultishiftqbv64qi (rtx, rtx, rtx);
extern rtx gen_vpmultishiftqbv64qi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpmultishiftqbv16qi (rtx, rtx, rtx);
extern rtx gen_vpmultishiftqbv16qi_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpmultishiftqbv32qi (rtx, rtx, rtx);
extern rtx gen_vpmultishiftqbv32qi_mask (rtx, rtx, rtx, rtx, rtx);
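/* Synchronization and atomics: memory fences, atomic load/store,
   compare-and-swap, exchange, fetch-add and the plain
   read-modify-write patterns in QI through DI widths.  */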
extern rtx gen_mfence_sse2 (rtx);
extern rtx gen_mfence_nosse (rtx);
extern rtx gen_atomic_loaddi_fpu (rtx, rtx, rtx);
extern rtx gen_atomic_storeqi_1 (rtx, rtx, rtx);
extern rtx gen_atomic_storehi_1 (rtx, rtx, rtx);
extern rtx gen_atomic_storesi_1 (rtx, rtx, rtx);
extern rtx gen_atomic_storedi_1 (rtx, rtx, rtx);
extern rtx gen_atomic_storedi_fpu (rtx, rtx, rtx);
extern rtx gen_loaddi_via_fpu (rtx, rtx);
extern rtx gen_storedi_via_fpu (rtx, rtx);
extern rtx gen_atomic_compare_and_swapdi_doubleword (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_atomic_compare_and_swapti_doubleword (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_atomic_compare_and_swapqi_1 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_atomic_compare_and_swaphi_1 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_atomic_compare_and_swapsi_1 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_atomic_compare_and_swapdi_1 (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_addqi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_addhi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_addsi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_fetch_adddi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_exchangeqi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_exchangehi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_exchangesi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_exchangedi (rtx, rtx, rtx, rtx);
extern rtx gen_atomic_addqi (rtx, rtx, rtx);
extern rtx gen_atomic_addhi (rtx, rtx, rtx);
extern rtx gen_atomic_addsi (rtx, rtx, rtx);
extern rtx gen_atomic_adddi (rtx, rtx, rtx);
extern rtx gen_atomic_subqi (rtx, rtx, rtx);
extern rtx gen_atomic_subhi (rtx, rtx, rtx);
extern rtx gen_atomic_subsi (rtx, rtx, rtx);
extern rtx gen_atomic_subdi (rtx, rtx, rtx);
extern rtx gen_atomic_andqi (rtx, rtx, rtx);
extern rtx gen_atomic_orqi (rtx, rtx, rtx);
extern rtx gen_atomic_xorqi (rtx, rtx, rtx);
extern rtx gen_atomic_andhi (rtx, rtx, rtx);
extern rtx gen_atomic_orhi (rtx, rtx, rtx);
extern rtx gen_atomic_xorhi (rtx, rtx, rtx);
extern rtx gen_atomic_andsi (rtx, rtx, rtx);
extern rtx gen_atomic_orsi (rtx, rtx, rtx);
extern rtx gen_atomic_xorsi (rtx, rtx, rtx);
extern rtx gen_atomic_anddi (rtx, rtx, rtx);
extern rtx gen_atomic_ordi (rtx, rtx, rtx);
extern rtx gen_atomic_xordi (rtx, rtx, rtx);
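/* Standard named patterns for conditional branches (cbranch<mode>4)
   and for storing a comparison result in a register (cstore<mode>4).  */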
extern rtx gen_cbranchqi4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchhi4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchsi4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchdi4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchti4 (rtx, rtx, rtx, rtx);
extern rtx gen_cstoreqi4 (rtx, rtx, rtx, rtx);
extern rtx gen_cstorehi4 (rtx, rtx, rtx, rtx);
extern rtx gen_cstoresi4 (rtx, rtx, rtx, rtx);
extern rtx gen_cstoredi4 (rtx, rtx, rtx, rtx);
extern rtx gen_cmpsi_1 (rtx, rtx);
extern rtx gen_cmpdi_1 (rtx, rtx);
extern rtx gen_cmpqi_ext_3 (rtx, rtx);
extern rtx gen_cbranchxf4 (rtx, rtx, rtx, rtx);
extern rtx gen_cstorexf4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchsf4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchdf4 (rtx, rtx, rtx, rtx);
extern rtx gen_cstoresf4 (rtx, rtx, rtx, rtx);
extern rtx gen_cstoredf4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchcc4 (rtx, rtx, rtx, rtx);
extern rtx gen_cstorecc4 (rtx, rtx, rtx, rtx);
extern rtx gen_reload_noff_store (rtx, rtx, rtx);
extern rtx gen_reload_noff_load (rtx, rtx, rtx);
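/* Standard mov<mode> expanders for the scalar integer and floating
   modes, strict-low-part moves, bit-field extract/insert (extv, extzv,
   insv), followed by the extension, truncation and integer<->float
   conversion patterns.  */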
extern rtx gen_movxi (rtx, rtx);
extern rtx gen_movoi (rtx, rtx);
extern rtx gen_movti (rtx, rtx);
extern rtx gen_movcdi (rtx, rtx);
extern rtx gen_movqi (rtx, rtx);
extern rtx gen_movhi (rtx, rtx);
extern rtx gen_movsi (rtx, rtx);
extern rtx gen_movdi (rtx, rtx);
extern rtx gen_movstrictqi (rtx, rtx);
extern rtx gen_movstricthi (rtx, rtx);
extern rtx gen_extvhi (rtx, rtx, rtx, rtx);
extern rtx gen_extvsi (rtx, rtx, rtx, rtx);
extern rtx gen_extzvhi (rtx, rtx, rtx, rtx);
extern rtx gen_extzvsi (rtx, rtx, rtx, rtx);
extern rtx gen_extzvdi (rtx, rtx, rtx, rtx);
extern rtx gen_insvhi (rtx, rtx, rtx, rtx);
extern rtx gen_insvsi (rtx, rtx, rtx, rtx);
extern rtx gen_insvdi (rtx, rtx, rtx, rtx);
extern rtx gen_movtf (rtx, rtx);
extern rtx gen_movsf (rtx, rtx);
extern rtx gen_movdf (rtx, rtx);
extern rtx gen_movxf (rtx, rtx);
extern rtx gen_zero_extendsidi2 (rtx, rtx);
extern rtx gen_zero_extendqisi2 (rtx, rtx);
extern rtx gen_zero_extendhisi2 (rtx, rtx);
extern rtx gen_zero_extendqihi2 (rtx, rtx);
extern rtx gen_extendsidi2 (rtx, rtx);
extern rtx gen_extendsfdf2 (rtx, rtx);
extern rtx gen_extendsfxf2 (rtx, rtx);
extern rtx gen_extenddfxf2 (rtx, rtx);
extern rtx gen_truncdfsf2 (rtx, rtx);
extern rtx gen_truncdfsf2_with_temp (rtx, rtx, rtx);
extern rtx gen_truncxfsf2 (rtx, rtx);
extern rtx gen_truncxfdf2 (rtx, rtx);
extern rtx gen_fix_truncxfdi2 (rtx, rtx);
extern rtx gen_fix_truncsfdi2 (rtx, rtx);
extern rtx gen_fix_truncdfdi2 (rtx, rtx);
extern rtx gen_fix_truncxfsi2 (rtx, rtx);
extern rtx gen_fix_truncsfsi2 (rtx, rtx);
extern rtx gen_fix_truncdfsi2 (rtx, rtx);
extern rtx gen_fix_truncsfhi2 (rtx, rtx);
extern rtx gen_fix_truncdfhi2 (rtx, rtx);
extern rtx gen_fix_truncxfhi2 (rtx, rtx);
extern rtx gen_fixuns_truncsfsi2 (rtx, rtx);
extern rtx gen_fixuns_truncdfsi2 (rtx, rtx);
extern rtx gen_fixuns_truncsfhi2 (rtx, rtx);
extern rtx gen_fixuns_truncdfhi2 (rtx, rtx);
extern rtx gen_floatsisf2 (rtx, rtx);
extern rtx gen_floatdisf2 (rtx, rtx);
extern rtx gen_floatsidf2 (rtx, rtx);
extern rtx gen_floatdidf2 (rtx, rtx);
extern rtx gen_floatunsqisf2 (rtx, rtx);
extern rtx gen_floatunshisf2 (rtx, rtx);
extern rtx gen_floatunsqidf2 (rtx, rtx);
extern rtx gen_floatunshidf2 (rtx, rtx);
extern rtx gen_floatunssisf2 (rtx, rtx);
extern rtx gen_floatunssidf2 (rtx, rtx);
extern rtx gen_floatunssixf2 (rtx, rtx);
extern rtx gen_floatunsdisf2 (rtx, rtx);
extern rtx gen_floatunsdidf2 (rtx, rtx);
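/* Scalar integer add/sub, including the signed (addv<mode>4,
   subv<mode>4) and unsigned (uaddv<mode>4, usubv<mode>4)
   overflow-detecting variants behind __builtin_add_overflow and
   friends.  Callers emit these through emit_insn; an illustrative
   sketch only (a and b stand for caller-prepared rtx values, not
   anything defined in this file):

     rtx d = gen_reg_rtx (SImode);
     emit_insn (gen_addsi3 (d, a, b));   // d = a + b
 */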
extern rtx gen_addqi3 (rtx, rtx, rtx);
extern rtx gen_addhi3 (rtx, rtx, rtx);
extern rtx gen_addsi3 (rtx, rtx, rtx);
extern rtx gen_adddi3 (rtx, rtx, rtx);
extern rtx gen_addti3 (rtx, rtx, rtx);
extern rtx gen_addvqi4 (rtx, rtx, rtx, rtx);
extern rtx gen_addvhi4 (rtx, rtx, rtx, rtx);
extern rtx gen_addvsi4 (rtx, rtx, rtx, rtx);
extern rtx gen_addvdi4 (rtx, rtx, rtx, rtx);
extern rtx gen_uaddvqi4 (rtx, rtx, rtx, rtx);
extern rtx gen_uaddvhi4 (rtx, rtx, rtx, rtx);
extern rtx gen_uaddvsi4 (rtx, rtx, rtx, rtx);
extern rtx gen_uaddvdi4 (rtx, rtx, rtx, rtx);
extern rtx gen_subqi3 (rtx, rtx, rtx);
extern rtx gen_subhi3 (rtx, rtx, rtx);
extern rtx gen_subsi3 (rtx, rtx, rtx);
extern rtx gen_subdi3 (rtx, rtx, rtx);
extern rtx gen_subti3 (rtx, rtx, rtx);
extern rtx gen_subvqi4 (rtx, rtx, rtx, rtx);
extern rtx gen_subvhi4 (rtx, rtx, rtx, rtx);
extern rtx gen_subvsi4 (rtx, rtx, rtx, rtx);
extern rtx gen_subvdi4 (rtx, rtx, rtx, rtx);
extern rtx gen_usubvqi4 (rtx, rtx, rtx, rtx);
extern rtx gen_usubvhi4 (rtx, rtx, rtx, rtx);
extern rtx gen_usubvsi4 (rtx, rtx, rtx, rtx);
extern rtx gen_usubvdi4 (rtx, rtx, rtx, rtx);
extern rtx gen_addqi3_cconly_overflow (rtx, rtx);
extern rtx gen_addxf3 (rtx, rtx, rtx);
extern rtx gen_subxf3 (rtx, rtx, rtx);
extern rtx gen_addsf3 (rtx, rtx, rtx);
extern rtx gen_subsf3 (rtx, rtx, rtx);
extern rtx gen_adddf3 (rtx, rtx, rtx);
extern rtx gen_subdf3 (rtx, rtx, rtx);
extern rtx gen_mulhi3 (rtx, rtx, rtx);
extern rtx gen_mulsi3 (rtx, rtx, rtx);
extern rtx gen_muldi3 (rtx, rtx, rtx);
extern rtx gen_mulqi3 (rtx, rtx, rtx);
extern rtx gen_mulvhi4 (rtx, rtx, rtx, rtx);
extern rtx gen_mulvsi4 (rtx, rtx, rtx, rtx);
extern rtx gen_mulvdi4 (rtx, rtx, rtx, rtx);
extern rtx gen_umulvhi4 (rtx, rtx, rtx, rtx);
extern rtx gen_umulvsi4 (rtx, rtx, rtx, rtx);
extern rtx gen_umulvdi4 (rtx, rtx, rtx, rtx);
extern rtx gen_mulvqi4 (rtx, rtx, rtx, rtx);
extern rtx gen_umulvqi4 (rtx, rtx, rtx, rtx);
extern rtx gen_mulsidi3 (rtx, rtx, rtx);
extern rtx gen_umulsidi3 (rtx, rtx, rtx);
extern rtx gen_mulditi3 (rtx, rtx, rtx);
extern rtx gen_umulditi3 (rtx, rtx, rtx);
extern rtx gen_mulqihi3 (rtx, rtx, rtx);
extern rtx gen_umulqihi3 (rtx, rtx, rtx);
extern rtx gen_smulsi3_highpart (rtx, rtx, rtx);
extern rtx gen_umulsi3_highpart (rtx, rtx, rtx);
extern rtx gen_smuldi3_highpart (rtx, rtx, rtx);
extern rtx gen_umuldi3_highpart (rtx, rtx, rtx);
extern rtx gen_mulxf3 (rtx, rtx, rtx);
extern rtx gen_mulsf3 (rtx, rtx, rtx);
extern rtx gen_muldf3 (rtx, rtx, rtx);
extern rtx gen_divxf3 (rtx, rtx, rtx);
extern rtx gen_divsf3 (rtx, rtx, rtx);
extern rtx gen_divdf3 (rtx, rtx, rtx);
extern rtx gen_divmodhi4 (rtx, rtx, rtx, rtx);
extern rtx gen_divmodsi4 (rtx, rtx, rtx, rtx);
extern rtx gen_divmoddi4 (rtx, rtx, rtx, rtx);
extern rtx gen_divmodqi4 (rtx, rtx, rtx, rtx);
extern rtx gen_udivmodhi4 (rtx, rtx, rtx, rtx);
extern rtx gen_udivmodsi4 (rtx, rtx, rtx, rtx);
extern rtx gen_udivmoddi4 (rtx, rtx, rtx, rtx);
extern rtx gen_udivmodqi4 (rtx, rtx, rtx, rtx);
extern rtx gen_testsi_ccno_1 (rtx, rtx);
extern rtx gen_testqi_ccz_1 (rtx, rtx);
extern rtx gen_testdi_ccno_1 (rtx, rtx);
extern rtx gen_testqi_ext_ccno_0 (rtx, rtx);
extern rtx gen_andqi3 (rtx, rtx, rtx);
extern rtx gen_andhi3 (rtx, rtx, rtx);
extern rtx gen_andsi3 (rtx, rtx, rtx);
extern rtx gen_anddi3 (rtx, rtx, rtx);
extern rtx gen_iorqi3 (rtx, rtx, rtx);
extern rtx gen_xorqi3 (rtx, rtx, rtx);
extern rtx gen_iorhi3 (rtx, rtx, rtx);
extern rtx gen_xorhi3 (rtx, rtx, rtx);
extern rtx gen_iorsi3 (rtx, rtx, rtx);
extern rtx gen_xorsi3 (rtx, rtx, rtx);
extern rtx gen_iordi3 (rtx, rtx, rtx);
extern rtx gen_xordi3 (rtx, rtx, rtx);
extern rtx gen_xorqi_cc_ext_1 (rtx, rtx, rtx);
extern rtx gen_negqi2 (rtx, rtx);
extern rtx gen_neghi2 (rtx, rtx);
extern rtx gen_negsi2 (rtx, rtx);
extern rtx gen_negdi2 (rtx, rtx);
extern rtx gen_negti2 (rtx, rtx);
extern rtx gen_negvqi3 (rtx, rtx, rtx);
extern rtx gen_negvhi3 (rtx, rtx, rtx);
extern rtx gen_negvsi3 (rtx, rtx, rtx);
extern rtx gen_negvdi3 (rtx, rtx, rtx);
extern rtx gen_abssf2 (rtx, rtx);
extern rtx gen_negsf2 (rtx, rtx);
extern rtx gen_absdf2 (rtx, rtx);
extern rtx gen_negdf2 (rtx, rtx);
extern rtx gen_absxf2 (rtx, rtx);
extern rtx gen_negxf2 (rtx, rtx);
extern rtx gen_abstf2 (rtx, rtx);
extern rtx gen_negtf2 (rtx, rtx);
extern rtx gen_copysignsf3 (rtx, rtx, rtx);
extern rtx gen_copysigndf3 (rtx, rtx, rtx);
extern rtx gen_copysigntf3 (rtx, rtx, rtx);
extern rtx gen_one_cmplqi2 (rtx, rtx);
extern rtx gen_one_cmplhi2 (rtx, rtx);
extern rtx gen_one_cmplsi2 (rtx, rtx);
extern rtx gen_one_cmpldi2 (rtx, rtx);
extern rtx gen_ashlqi3 (rtx, rtx, rtx);
extern rtx gen_ashlhi3 (rtx, rtx, rtx);
extern rtx gen_ashlsi3 (rtx, rtx, rtx);
extern rtx gen_ashldi3 (rtx, rtx, rtx);
extern rtx gen_ashlti3 (rtx, rtx, rtx);
extern rtx gen_x86_shiftsi_adj_1 (rtx, rtx, rtx, rtx);
extern rtx gen_x86_shiftdi_adj_1 (rtx, rtx, rtx, rtx);
extern rtx gen_x86_shiftsi_adj_2 (rtx, rtx, rtx);
extern rtx gen_x86_shiftdi_adj_2 (rtx, rtx, rtx);
extern rtx gen_lshrqi3 (rtx, rtx, rtx);
extern rtx gen_ashrqi3 (rtx, rtx, rtx);
extern rtx gen_lshrhi3 (rtx, rtx, rtx);
extern rtx gen_ashrhi3 (rtx, rtx, rtx);
extern rtx gen_lshrsi3 (rtx, rtx, rtx);
extern rtx gen_ashrsi3 (rtx, rtx, rtx);
extern rtx gen_lshrdi3 (rtx, rtx, rtx);
extern rtx gen_ashrdi3 (rtx, rtx, rtx);
extern rtx gen_lshrti3 (rtx, rtx, rtx);
extern rtx gen_ashrti3 (rtx, rtx, rtx);
extern rtx gen_x86_shiftsi_adj_3 (rtx, rtx, rtx);
extern rtx gen_x86_shiftdi_adj_3 (rtx, rtx, rtx);
extern rtx gen_rotlti3 (rtx, rtx, rtx);
extern rtx gen_rotrti3 (rtx, rtx, rtx);
extern rtx gen_rotldi3 (rtx, rtx, rtx);
extern rtx gen_rotrdi3 (rtx, rtx, rtx);
extern rtx gen_rotlqi3 (rtx, rtx, rtx);
extern rtx gen_rotrqi3 (rtx, rtx, rtx);
extern rtx gen_rotlhi3 (rtx, rtx, rtx);
extern rtx gen_rotrhi3 (rtx, rtx, rtx);
extern rtx gen_rotlsi3 (rtx, rtx, rtx);
extern rtx gen_rotrsi3 (rtx, rtx, rtx);
extern rtx gen_indirect_jump (rtx);
extern rtx gen_tablejump (rtx, rtx);
extern rtx gen_call (rtx, rtx, rtx);
extern rtx gen_sibcall (rtx, rtx, rtx);
extern rtx gen_call_pop (rtx, rtx, rtx, rtx);
extern rtx gen_call_value (rtx, rtx, rtx, rtx);
extern rtx gen_sibcall_value (rtx, rtx, rtx, rtx);
extern rtx gen_call_value_pop (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_untyped_call (rtx, rtx, rtx);
extern rtx gen_memory_blockage (void);
extern rtx gen_return (void);
extern rtx gen_simple_return (void);
extern rtx gen_prologue (void);
extern rtx gen_set_got (rtx);
extern rtx gen_set_got_labelled (rtx, rtx);
extern rtx gen_epilogue (void);
extern rtx gen_sibcall_epilogue (void);
extern rtx gen_eh_return (rtx);
extern rtx gen_split_stack_prologue (void);
extern rtx gen_split_stack_space_check (rtx, rtx);
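/* Bit-scan and bit-count expanders: ffs, ctz/clz (with the BMI/LZCNT
   tzcnt and lzcnt forms), bzhi, popcount, bswap and parity.  */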
extern rtx gen_ffssi2 (rtx, rtx);
extern rtx gen_ffsdi2 (rtx, rtx);
extern rtx gen_ctzsi2 (rtx, rtx);
extern rtx gen_ctzdi2 (rtx, rtx);
extern rtx gen_bmi_tzcnt_hi (rtx, rtx);
extern rtx gen_bmi_tzcnt_si (rtx, rtx);
extern rtx gen_bmi_tzcnt_di (rtx, rtx);
extern rtx gen_clzsi2 (rtx, rtx);
extern rtx gen_clzdi2 (rtx, rtx);
extern rtx gen_clzsi2_lzcnt (rtx, rtx);
extern rtx gen_clzdi2_lzcnt (rtx, rtx);
extern rtx gen_lzcnt_hi (rtx, rtx);
extern rtx gen_lzcnt_si (rtx, rtx);
extern rtx gen_lzcnt_di (rtx, rtx);
extern rtx gen_bmi2_bzhi_si3 (rtx, rtx, rtx);
extern rtx gen_bmi2_bzhi_di3 (rtx, rtx, rtx);
extern rtx gen_popcounthi2 (rtx, rtx);
extern rtx gen_popcountsi2 (rtx, rtx);
extern rtx gen_popcountdi2 (rtx, rtx);
extern rtx gen_bswapdi2 (rtx, rtx);
extern rtx gen_bswapsi2 (rtx, rtx);
extern rtx gen_paritydi2 (rtx, rtx);
extern rtx gen_paritysi2 (rtx, rtx);
extern rtx gen_tls_global_dynamic_32 (rtx, rtx, rtx, rtx);
extern rtx gen_tls_global_dynamic_64_si (rtx, rtx, rtx);
extern rtx gen_tls_global_dynamic_64_di (rtx, rtx, rtx);
extern rtx gen_tls_local_dynamic_base_32 (rtx, rtx, rtx);
extern rtx gen_tls_local_dynamic_base_64_si (rtx, rtx);
extern rtx gen_tls_local_dynamic_base_64_di (rtx, rtx);
extern rtx gen_tls_dynamic_gnu2_32 (rtx, rtx, rtx);
extern rtx gen_tls_dynamic_gnu2_64 (rtx, rtx);
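/* Scalar floating-point math, largely x87-based: sqrt, fmod/remainder,
   sincos, the trigonometric and log/exp families, ldexp/scalb,
   significand, the rounding and conversion patterns (rint, round,
   lrint, lround, floor, ceil, btrunc, lfloor, lceil) and the isinf and
   signbit tests.  */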
extern rtx gen_rsqrtsf2 (rtx, rtx);
extern rtx gen_sqrtsf2 (rtx, rtx);
extern rtx gen_sqrtdf2 (rtx, rtx);
extern rtx gen_fmodxf3 (rtx, rtx, rtx);
extern rtx gen_fmodsf3 (rtx, rtx, rtx);
extern rtx gen_fmoddf3 (rtx, rtx, rtx);
extern rtx gen_remainderxf3 (rtx, rtx, rtx);
extern rtx gen_remaindersf3 (rtx, rtx, rtx);
extern rtx gen_remainderdf3 (rtx, rtx, rtx);
extern rtx gen_sincossf3 (rtx, rtx, rtx);
extern rtx gen_sincosdf3 (rtx, rtx, rtx);
extern rtx gen_tanxf2 (rtx, rtx);
extern rtx gen_tansf2 (rtx, rtx);
extern rtx gen_tandf2 (rtx, rtx);
extern rtx gen_atan2xf3 (rtx, rtx, rtx);
extern rtx gen_atan2sf3 (rtx, rtx, rtx);
extern rtx gen_atan2df3 (rtx, rtx, rtx);
extern rtx gen_atanxf2 (rtx, rtx);
extern rtx gen_atansf2 (rtx, rtx);
extern rtx gen_atandf2 (rtx, rtx);
extern rtx gen_asinxf2 (rtx, rtx);
extern rtx gen_asinsf2 (rtx, rtx);
extern rtx gen_asindf2 (rtx, rtx);
extern rtx gen_acosxf2 (rtx, rtx);
extern rtx gen_acossf2 (rtx, rtx);
extern rtx gen_acosdf2 (rtx, rtx);
extern rtx gen_logxf2 (rtx, rtx);
extern rtx gen_logsf2 (rtx, rtx);
extern rtx gen_logdf2 (rtx, rtx);
extern rtx gen_log10xf2 (rtx, rtx);
extern rtx gen_log10sf2 (rtx, rtx);
extern rtx gen_log10df2 (rtx, rtx);
extern rtx gen_log2xf2 (rtx, rtx);
extern rtx gen_log2sf2 (rtx, rtx);
extern rtx gen_log2df2 (rtx, rtx);
extern rtx gen_log1pxf2 (rtx, rtx);
extern rtx gen_log1psf2 (rtx, rtx);
extern rtx gen_log1pdf2 (rtx, rtx);
extern rtx gen_logbxf2 (rtx, rtx);
extern rtx gen_logbsf2 (rtx, rtx);
extern rtx gen_logbdf2 (rtx, rtx);
extern rtx gen_ilogbxf2 (rtx, rtx);
extern rtx gen_ilogbsf2 (rtx, rtx);
extern rtx gen_ilogbdf2 (rtx, rtx);
extern rtx gen_expNcorexf3 (rtx, rtx, rtx);
extern rtx gen_expxf2 (rtx, rtx);
extern rtx gen_expsf2 (rtx, rtx);
extern rtx gen_expdf2 (rtx, rtx);
extern rtx gen_exp10xf2 (rtx, rtx);
extern rtx gen_exp10sf2 (rtx, rtx);
extern rtx gen_exp10df2 (rtx, rtx);
extern rtx gen_exp2xf2 (rtx, rtx);
extern rtx gen_exp2sf2 (rtx, rtx);
extern rtx gen_exp2df2 (rtx, rtx);
extern rtx gen_expm1xf2 (rtx, rtx);
extern rtx gen_expm1sf2 (rtx, rtx);
extern rtx gen_expm1df2 (rtx, rtx);
extern rtx gen_ldexpxf3 (rtx, rtx, rtx);
extern rtx gen_ldexpsf3 (rtx, rtx, rtx);
extern rtx gen_ldexpdf3 (rtx, rtx, rtx);
extern rtx gen_scalbxf3 (rtx, rtx, rtx);
extern rtx gen_scalbsf3 (rtx, rtx, rtx);
extern rtx gen_scalbdf3 (rtx, rtx, rtx);
extern rtx gen_significandxf2 (rtx, rtx);
extern rtx gen_significandsf2 (rtx, rtx);
extern rtx gen_significanddf2 (rtx, rtx);
extern rtx gen_rintsf2 (rtx, rtx);
extern rtx gen_rintdf2 (rtx, rtx);
extern rtx gen_roundsf2 (rtx, rtx);
extern rtx gen_rounddf2 (rtx, rtx);
extern rtx gen_roundxf2 (rtx, rtx);
extern rtx gen_lrintxfhi2 (rtx, rtx);
extern rtx gen_lrintxfsi2 (rtx, rtx);
extern rtx gen_lrintxfdi2 (rtx, rtx);
extern rtx gen_lrintsfsi2 (rtx, rtx);
extern rtx gen_lrintsfdi2 (rtx, rtx);
extern rtx gen_lrintdfsi2 (rtx, rtx);
extern rtx gen_lrintdfdi2 (rtx, rtx);
extern rtx gen_lroundsfhi2 (rtx, rtx);
extern rtx gen_lrounddfhi2 (rtx, rtx);
extern rtx gen_lroundxfhi2 (rtx, rtx);
extern rtx gen_lroundsfsi2 (rtx, rtx);
extern rtx gen_lrounddfsi2 (rtx, rtx);
extern rtx gen_lroundxfsi2 (rtx, rtx);
extern rtx gen_lroundsfdi2 (rtx, rtx);
extern rtx gen_lrounddfdi2 (rtx, rtx);
extern rtx gen_lroundxfdi2 (rtx, rtx);
extern rtx gen_floorxf2 (rtx, rtx);
extern rtx gen_ceilxf2 (rtx, rtx);
extern rtx gen_btruncxf2 (rtx, rtx);
extern rtx gen_floorsf2 (rtx, rtx);
extern rtx gen_ceilsf2 (rtx, rtx);
extern rtx gen_btruncsf2 (rtx, rtx);
extern rtx gen_floordf2 (rtx, rtx);
extern rtx gen_ceildf2 (rtx, rtx);
extern rtx gen_btruncdf2 (rtx, rtx);
extern rtx gen_nearbyintxf2 (rtx, rtx);
extern rtx gen_nearbyintsf2 (rtx, rtx);
extern rtx gen_nearbyintdf2 (rtx, rtx);
extern rtx gen_lfloorxfhi2 (rtx, rtx);
extern rtx gen_lceilxfhi2 (rtx, rtx);
extern rtx gen_lfloorxfsi2 (rtx, rtx);
extern rtx gen_lceilxfsi2 (rtx, rtx);
extern rtx gen_lfloorxfdi2 (rtx, rtx);
extern rtx gen_lceilxfdi2 (rtx, rtx);
extern rtx gen_lfloorsfsi2 (rtx, rtx);
extern rtx gen_lceilsfsi2 (rtx, rtx);
extern rtx gen_lfloorsfdi2 (rtx, rtx);
extern rtx gen_lceilsfdi2 (rtx, rtx);
extern rtx gen_lfloordfsi2 (rtx, rtx);
extern rtx gen_lceildfsi2 (rtx, rtx);
extern rtx gen_lfloordfdi2 (rtx, rtx);
extern rtx gen_lceildfdi2 (rtx, rtx);
extern rtx gen_isinfxf2 (rtx, rtx);
extern rtx gen_isinfsf2 (rtx, rtx);
extern rtx gen_isinfdf2 (rtx, rtx);
extern rtx gen_signbitxf2 (rtx, rtx);
extern rtx gen_signbitdf2 (rtx, rtx);
extern rtx gen_signbitsf2 (rtx, rtx);
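/* Block memory and string expanders backing memcpy/memset/strncmp/
   strlen expansion: movmem, setmem, the rep-prefixed movs/stos
   helpers, cmpstrn and strlen.  */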
extern rtx gen_movmemsi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_movmemdi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_strmov (rtx, rtx, rtx, rtx);
extern rtx gen_strmov_singleop (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_rep_mov (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_setmemsi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_setmemdi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_strset (rtx, rtx, rtx);
extern rtx gen_strset_singleop (rtx, rtx, rtx, rtx);
extern rtx gen_rep_stos (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cmpstrnsi (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cmpintqi (rtx);
extern rtx gen_cmpstrnqi_nz_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_cmpstrnqi_1 (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_strlensi (rtx, rtx, rtx, rtx);
extern rtx gen_strlendi (rtx, rtx, rtx, rtx);
extern rtx gen_strlenqi_1 (rtx, rtx, rtx);
extern rtx gen_movqicc (rtx, rtx, rtx, rtx);
extern rtx gen_movhicc (rtx, rtx, rtx, rtx);
extern rtx gen_movsicc (rtx, rtx, rtx, rtx);
extern rtx gen_movdicc (rtx, rtx, rtx, rtx);
extern rtx gen_x86_movsicc_0_m1 (rtx, rtx, rtx);
extern rtx gen_x86_movdicc_0_m1 (rtx, rtx, rtx);
extern rtx gen_movsfcc (rtx, rtx, rtx, rtx);
extern rtx gen_movdfcc (rtx, rtx, rtx, rtx);
extern rtx gen_movxfcc (rtx, rtx, rtx, rtx);
extern rtx gen_addqicc (rtx, rtx, rtx, rtx);
extern rtx gen_addhicc (rtx, rtx, rtx, rtx);
extern rtx gen_addsicc (rtx, rtx, rtx, rtx);
extern rtx gen_adddicc (rtx, rtx, rtx, rtx);
extern rtx gen_allocate_stack (rtx, rtx);
extern rtx gen_probe_stack (rtx);
extern rtx gen_builtin_setjmp_receiver (rtx);
extern rtx gen_prefetch (rtx, rtx, rtx);
extern rtx gen_stack_protect_set (rtx, rtx);
extern rtx gen_stack_protect_test (rtx, rtx, rtx);
extern rtx gen_lwp_llwpcb (rtx);
extern rtx gen_lwp_slwpcb (rtx);
extern rtx gen_lwp_lwpvalsi3 (rtx, rtx, rtx, rtx);
extern rtx gen_lwp_lwpvaldi3 (rtx, rtx, rtx, rtx);
extern rtx gen_lwp_lwpinssi3 (rtx, rtx, rtx, rtx);
extern rtx gen_lwp_lwpinsdi3 (rtx, rtx, rtx, rtx);
extern rtx gen_pause (void);
extern rtx gen_xbegin (rtx);
extern rtx gen_xtest (rtx);
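/* Intel MPX bound-register expanders: bndmk, bndmov, the lower/upper/
   not-narrowed checks (bndcl, bndcu, bndcn) and the bndldx/bndstx
   bounds-table accesses, each in 32- and 64-bit form.  */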
extern rtx gen_bnd32_mk (rtx, rtx, rtx);
extern rtx gen_bnd64_mk (rtx, rtx, rtx);
extern rtx gen_movbnd32 (rtx, rtx);
extern rtx gen_movbnd64 (rtx, rtx);
extern rtx gen_bnd32_cl (rtx, rtx);
extern rtx gen_bnd32_cu (rtx, rtx);
extern rtx gen_bnd32_cn (rtx, rtx);
extern rtx gen_bnd64_cl (rtx, rtx);
extern rtx gen_bnd64_cu (rtx, rtx);
extern rtx gen_bnd64_cn (rtx, rtx);
extern rtx gen_bnd32_ldx (rtx, rtx, rtx);
extern rtx gen_bnd64_ldx (rtx, rtx, rtx);
extern rtx gen_bnd32_stx (rtx, rtx, rtx);
extern rtx gen_bnd64_stx (rtx, rtx, rtx);
extern rtx gen_rdpkru (rtx);
extern rtx gen_wrpkru (rtx);
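/* MMX/3DNow! expanders: 64-bit vector moves, plain and saturating
   arithmetic, element insert/extract/init, averages, maskmovq and the
   emms/femms state-clearing patterns.  */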
extern rtx gen_movv8qi (rtx, rtx);
extern rtx gen_movv4hi (rtx, rtx);
extern rtx gen_movv2si (rtx, rtx);
extern rtx gen_movv1di (rtx, rtx);
extern rtx gen_movv2sf (rtx, rtx);
extern rtx gen_movmisalignv8qi (rtx, rtx);
extern rtx gen_movmisalignv4hi (rtx, rtx);
extern rtx gen_movmisalignv2si (rtx, rtx);
extern rtx gen_movmisalignv1di (rtx, rtx);
extern rtx gen_movmisalignv2sf (rtx, rtx);
extern rtx gen_mmx_addv2sf3 (rtx, rtx, rtx);
extern rtx gen_mmx_subv2sf3 (rtx, rtx, rtx);
extern rtx gen_mmx_subrv2sf3 (rtx, rtx, rtx);
extern rtx gen_mmx_mulv2sf3 (rtx, rtx, rtx);
extern rtx gen_mmx_smaxv2sf3 (rtx, rtx, rtx);
extern rtx gen_mmx_sminv2sf3 (rtx, rtx, rtx);
extern rtx gen_mmx_eqv2sf3 (rtx, rtx, rtx);
extern rtx gen_vec_setv2sf (rtx, rtx, rtx);
extern rtx gen_vec_extractv2sf (rtx, rtx, rtx);
extern rtx gen_vec_initv2sf (rtx, rtx);
extern rtx gen_mmx_addv8qi3 (rtx, rtx, rtx);
extern rtx gen_mmx_subv8qi3 (rtx, rtx, rtx);
extern rtx gen_mmx_addv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_subv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_addv2si3 (rtx, rtx, rtx);
extern rtx gen_mmx_subv2si3 (rtx, rtx, rtx);
extern rtx gen_mmx_addv1di3 (rtx, rtx, rtx);
extern rtx gen_mmx_subv1di3 (rtx, rtx, rtx);
extern rtx gen_mmx_ssaddv8qi3 (rtx, rtx, rtx);
extern rtx gen_mmx_usaddv8qi3 (rtx, rtx, rtx);
extern rtx gen_mmx_sssubv8qi3 (rtx, rtx, rtx);
extern rtx gen_mmx_ussubv8qi3 (rtx, rtx, rtx);
extern rtx gen_mmx_ssaddv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_usaddv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_sssubv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_ussubv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_mulv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_smulv4hi3_highpart (rtx, rtx, rtx);
extern rtx gen_mmx_umulv4hi3_highpart (rtx, rtx, rtx);
extern rtx gen_mmx_pmaddwd (rtx, rtx, rtx);
extern rtx gen_mmx_pmulhrwv4hi3 (rtx, rtx, rtx);
extern rtx gen_sse2_umulv1siv1di3 (rtx, rtx, rtx);
extern rtx gen_mmx_smaxv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_sminv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_umaxv8qi3 (rtx, rtx, rtx);
extern rtx gen_mmx_uminv8qi3 (rtx, rtx, rtx);
extern rtx gen_mmx_eqv8qi3 (rtx, rtx, rtx);
extern rtx gen_mmx_eqv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_eqv2si3 (rtx, rtx, rtx);
extern rtx gen_mmx_andv8qi3 (rtx, rtx, rtx);
extern rtx gen_mmx_iorv8qi3 (rtx, rtx, rtx);
extern rtx gen_mmx_xorv8qi3 (rtx, rtx, rtx);
extern rtx gen_mmx_andv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_iorv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_xorv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_andv2si3 (rtx, rtx, rtx);
extern rtx gen_mmx_iorv2si3 (rtx, rtx, rtx);
extern rtx gen_mmx_xorv2si3 (rtx, rtx, rtx);
extern rtx gen_mmx_pinsrw (rtx, rtx, rtx, rtx);
extern rtx gen_mmx_pshufw (rtx, rtx, rtx);
extern rtx gen_vec_setv2si (rtx, rtx, rtx);
extern rtx gen_vec_extractv2si (rtx, rtx, rtx);
extern rtx gen_vec_initv2si (rtx, rtx);
extern rtx gen_vec_setv4hi (rtx, rtx, rtx);
extern rtx gen_vec_extractv4hi (rtx, rtx, rtx);
extern rtx gen_vec_initv4hi (rtx, rtx);
extern rtx gen_vec_setv8qi (rtx, rtx, rtx);
extern rtx gen_vec_extractv8qi (rtx, rtx, rtx);
extern rtx gen_vec_initv8qi (rtx, rtx);
extern rtx gen_mmx_uavgv8qi3 (rtx, rtx, rtx);
extern rtx gen_mmx_uavgv4hi3 (rtx, rtx, rtx);
extern rtx gen_mmx_maskmovq (rtx, rtx, rtx);
extern rtx gen_mmx_emms (void);
extern rtx gen_mmx_femms (void);
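/* Full-width vector moves (mov<mode> and movmisalign<mode>) for the
   128-, 256- and 512-bit SSE/AVX/AVX-512 modes.  */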
extern rtx gen_movv64qi (rtx, rtx);
extern rtx gen_movv32qi (rtx, rtx);
extern rtx gen_movv16qi (rtx, rtx);
extern rtx gen_movv32hi (rtx, rtx);
extern rtx gen_movv16hi (rtx, rtx);
extern rtx gen_movv8hi (rtx, rtx);
extern rtx gen_movv16si (rtx, rtx);
extern rtx gen_movv8si (rtx, rtx);
extern rtx gen_movv4si (rtx, rtx);
extern rtx gen_movv8di (rtx, rtx);
extern rtx gen_movv4di (rtx, rtx);
extern rtx gen_movv2di (rtx, rtx);
extern rtx gen_movv4ti (rtx, rtx);
extern rtx gen_movv2ti (rtx, rtx);
extern rtx gen_movv1ti (rtx, rtx);
extern rtx gen_movv16sf (rtx, rtx);
extern rtx gen_movv8sf (rtx, rtx);
extern rtx gen_movv4sf (rtx, rtx);
extern rtx gen_movv8df (rtx, rtx);
extern rtx gen_movv4df (rtx, rtx);
extern rtx gen_movv2df (rtx, rtx);
extern rtx gen_movmisalignv64qi (rtx, rtx);
extern rtx gen_movmisalignv32qi (rtx, rtx);
extern rtx gen_movmisalignv16qi (rtx, rtx);
extern rtx gen_movmisalignv32hi (rtx, rtx);
extern rtx gen_movmisalignv16hi (rtx, rtx);
extern rtx gen_movmisalignv8hi (rtx, rtx);
extern rtx gen_movmisalignv16si (rtx, rtx);
extern rtx gen_movmisalignv8si (rtx, rtx);
extern rtx gen_movmisalignv4si (rtx, rtx);
extern rtx gen_movmisalignv8di (rtx, rtx);
extern rtx gen_movmisalignv4di (rtx, rtx);
extern rtx gen_movmisalignv2di (rtx, rtx);
extern rtx gen_movmisalignv4ti (rtx, rtx);
extern rtx gen_movmisalignv2ti (rtx, rtx);
extern rtx gen_movmisalignv1ti (rtx, rtx);
extern rtx gen_movmisalignv16sf (rtx, rtx);
extern rtx gen_movmisalignv8sf (rtx, rtx);
extern rtx gen_movmisalignv4sf (rtx, rtx);
extern rtx gen_movmisalignv8df (rtx, rtx);
extern rtx gen_movmisalignv4df (rtx, rtx);
extern rtx gen_movmisalignv2df (rtx, rtx);
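/* Explicitly unaligned loads (movups, movupd, movdqu and the AVX-512
   masked variants).  */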
extern rtx gen_avx512f_loadups512 (rtx, rtx);
extern rtx gen_avx512f_loadups512_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx_loadups256 (rtx, rtx);
extern rtx gen_avx_loadups256_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse_loadups (rtx, rtx);
extern rtx gen_sse_loadups_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_loadupd512 (rtx, rtx);
extern rtx gen_avx512f_loadupd512_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx_loadupd256 (rtx, rtx);
extern rtx gen_avx_loadupd256_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_loadupd (rtx, rtx);
extern rtx gen_sse2_loadupd_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx_loaddquv32qi (rtx, rtx);
extern rtx gen_avx_loaddquv32qi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_loaddquv16qi (rtx, rtx);
extern rtx gen_sse2_loaddquv16qi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_loaddquv64qi (rtx, rtx);
extern rtx gen_avx512f_loaddquv64qi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_loaddquv32hi (rtx, rtx);
extern rtx gen_avx512bw_loaddquv32hi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_loaddquv8hi (rtx, rtx);
extern rtx gen_avx512vl_loaddquv8hi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_loaddquv16hi (rtx, rtx);
extern rtx gen_avx512vl_loaddquv16hi_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_loaddquv16si (rtx, rtx);
extern rtx gen_avx512f_loaddquv16si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx_loaddquv8si (rtx, rtx);
extern rtx gen_avx_loaddquv8si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_loaddquv4si (rtx, rtx);
extern rtx gen_sse2_loaddquv4si_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_loaddquv8di (rtx, rtx);
extern rtx gen_avx512f_loaddquv8di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_loaddquv4di (rtx, rtx);
extern rtx gen_avx512vl_loaddquv4di_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_loaddquv2di (rtx, rtx);
extern rtx gen_avx512vl_loaddquv2di_mask (rtx, rtx, rtx, rtx);
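/* Non-temporal stores (movnti, movntps, movntpd and friends), which
   bypass the cache hierarchy.  */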
extern rtx gen_storentdi (rtx, rtx);
extern rtx gen_storentsi (rtx, rtx);
extern rtx gen_storentsf (rtx, rtx);
extern rtx gen_storentdf (rtx, rtx);
extern rtx gen_storentv8di (rtx, rtx);
extern rtx gen_storentv4di (rtx, rtx);
extern rtx gen_storentv2di (rtx, rtx);
extern rtx gen_storentv16sf (rtx, rtx);
extern rtx gen_storentv8sf (rtx, rtx);
extern rtx gen_storentv4sf (rtx, rtx);
extern rtx gen_storentv8df (rtx, rtx);
extern rtx gen_storentv4df (rtx, rtx);
extern rtx gen_storentv2df (rtx, rtx);
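/* Element-wise vector float arithmetic: abs/neg, add/sub/mul/div,
   sqrt/rsqrt, smax/smin.  Only the 512-bit modes have real
   embedded-rounding (_round) variants; the 256/128-bit _round entries
   below are the usual return-0 stubs.  */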
extern rtx gen_absv16sf2 (rtx, rtx);
extern rtx gen_negv16sf2 (rtx, rtx);
extern rtx gen_absv8sf2 (rtx, rtx);
extern rtx gen_negv8sf2 (rtx, rtx);
extern rtx gen_absv4sf2 (rtx, rtx);
extern rtx gen_negv4sf2 (rtx, rtx);
extern rtx gen_absv8df2 (rtx, rtx);
extern rtx gen_negv8df2 (rtx, rtx);
extern rtx gen_absv4df2 (rtx, rtx);
extern rtx gen_negv4df2 (rtx, rtx);
extern rtx gen_absv2df2 (rtx, rtx);
extern rtx gen_negv2df2 (rtx, rtx);
extern rtx gen_addv16sf3 (rtx, rtx, rtx);
extern rtx gen_addv16sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_addv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv16sf3 (rtx, rtx, rtx);
extern rtx gen_subv16sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_subv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv8sf3 (rtx, rtx, rtx);
static inline rtx gen_addv8sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_addv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_addv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_addv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_addv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_subv8sf3 (rtx, rtx, rtx);
static inline rtx gen_subv8sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_subv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_subv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_subv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_subv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_addv4sf3 (rtx, rtx, rtx);
static inline rtx gen_addv4sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_addv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_addv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_addv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_addv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_subv4sf3 (rtx, rtx, rtx);
static inline rtx gen_subv4sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_subv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_subv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_subv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_subv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_addv8df3 (rtx, rtx, rtx);
extern rtx gen_addv8df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_addv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv8df3 (rtx, rtx, rtx);
extern rtx gen_subv8df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_subv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv4df3 (rtx, rtx, rtx);
static inline rtx gen_addv4df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_addv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_addv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_addv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_addv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_subv4df3 (rtx, rtx, rtx);
static inline rtx gen_subv4df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_subv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_subv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_subv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_subv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_addv2df3 (rtx, rtx, rtx);
static inline rtx gen_addv2df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_addv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_addv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_addv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_addv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_subv2df3 (rtx, rtx, rtx);
static inline rtx gen_subv2df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_subv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_subv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_subv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_subv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_mulv16sf3 (rtx, rtx, rtx);
extern rtx gen_mulv16sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_mulv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv8sf3 (rtx, rtx, rtx);
static inline rtx gen_mulv8sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_mulv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_mulv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_mulv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_mulv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_mulv4sf3 (rtx, rtx, rtx);
static inline rtx gen_mulv4sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_mulv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_mulv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_mulv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_mulv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_mulv8df3 (rtx, rtx, rtx);
extern rtx gen_mulv8df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_mulv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv4df3 (rtx, rtx, rtx);
static inline rtx gen_mulv4df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_mulv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_mulv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_mulv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_mulv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_mulv2df3 (rtx, rtx, rtx);
static inline rtx gen_mulv2df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_mulv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_mulv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_mulv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_mulv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_divv8df3 (rtx, rtx, rtx);
extern rtx gen_divv4df3 (rtx, rtx, rtx);
extern rtx gen_divv2df3 (rtx, rtx, rtx);
extern rtx gen_divv16sf3 (rtx, rtx, rtx);
extern rtx gen_divv8sf3 (rtx, rtx, rtx);
extern rtx gen_divv4sf3 (rtx, rtx, rtx);
extern rtx gen_sqrtv8df2 (rtx, rtx);
extern rtx gen_sqrtv4df2 (rtx, rtx);
extern rtx gen_sqrtv2df2 (rtx, rtx);
extern rtx gen_sqrtv16sf2 (rtx, rtx);
extern rtx gen_sqrtv8sf2 (rtx, rtx);
extern rtx gen_sqrtv4sf2 (rtx, rtx);
extern rtx gen_rsqrtv8sf2 (rtx, rtx);
extern rtx gen_rsqrtv4sf2 (rtx, rtx);
extern rtx gen_smaxv16sf3 (rtx, rtx, rtx);
extern rtx gen_smaxv16sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_smaxv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv16sf3 (rtx, rtx, rtx);
extern rtx gen_sminv16sf3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sminv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv16sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv8sf3 (rtx, rtx, rtx);
static inline rtx gen_smaxv8sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_smaxv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_smaxv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_smaxv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_smaxv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_sminv8sf3 (rtx, rtx, rtx);
static inline rtx gen_sminv8sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_sminv8sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_sminv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_sminv8sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_sminv8sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_smaxv4sf3 (rtx, rtx, rtx);
static inline rtx gen_smaxv4sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_smaxv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_smaxv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_smaxv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_smaxv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_sminv4sf3 (rtx, rtx, rtx);
static inline rtx gen_sminv4sf3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_sminv4sf3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_sminv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_sminv4sf3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_sminv4sf3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_smaxv8df3 (rtx, rtx, rtx);
extern rtx gen_smaxv8df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_smaxv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv8df3 (rtx, rtx, rtx);
extern rtx gen_sminv8df3_round (rtx, rtx, rtx, rtx);
extern rtx gen_sminv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv8df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv4df3 (rtx, rtx, rtx);
static inline rtx gen_smaxv4df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_smaxv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_smaxv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_smaxv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_smaxv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_sminv4df3 (rtx, rtx, rtx);
static inline rtx gen_sminv4df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_sminv4df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_sminv4df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_sminv4df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_sminv4df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_smaxv2df3 (rtx, rtx, rtx);
static inline rtx gen_smaxv2df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_smaxv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_smaxv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_smaxv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_smaxv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
extern rtx gen_sminv2df3 (rtx, rtx, rtx);
static inline rtx gen_sminv2df3_round (rtx, rtx, rtx, rtx);
static inline rtx
gen_sminv2df3_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
{
  return 0;
}
extern rtx gen_sminv2df3_mask (rtx, rtx, rtx, rtx, rtx);
static inline rtx gen_sminv2df3_mask_round (rtx, rtx, rtx, rtx, rtx, rtx);
static inline rtx
gen_sminv2df3_mask_round(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f))
{
  return 0;
}
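/* Horizontal add and whole-vector reductions (reduc_plus_scal_*,
   reduc_{s,u}{max,min}_scal_*): fold a vector down to one scalar.  */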
extern rtx gen_sse3_haddv2df3 (rtx, rtx, rtx);
extern rtx gen_reduc_plus_scal_v8df (rtx, rtx);
extern rtx gen_reduc_plus_scal_v4df (rtx, rtx);
extern rtx gen_reduc_plus_scal_v2df (rtx, rtx);
extern rtx gen_reduc_plus_scal_v16sf (rtx, rtx);
extern rtx gen_reduc_plus_scal_v8sf (rtx, rtx);
extern rtx gen_reduc_plus_scal_v4sf (rtx, rtx);
extern rtx gen_reduc_smax_scal_v32qi (rtx, rtx);
extern rtx gen_reduc_smin_scal_v32qi (rtx, rtx);
extern rtx gen_reduc_smax_scal_v16hi (rtx, rtx);
extern rtx gen_reduc_smin_scal_v16hi (rtx, rtx);
extern rtx gen_reduc_smax_scal_v8si (rtx, rtx);
extern rtx gen_reduc_smin_scal_v8si (rtx, rtx);
extern rtx gen_reduc_smax_scal_v4di (rtx, rtx);
extern rtx gen_reduc_smin_scal_v4di (rtx, rtx);
extern rtx gen_reduc_smax_scal_v8sf (rtx, rtx);
extern rtx gen_reduc_smin_scal_v8sf (rtx, rtx);
extern rtx gen_reduc_smax_scal_v4df (rtx, rtx);
extern rtx gen_reduc_smin_scal_v4df (rtx, rtx);
extern rtx gen_reduc_smax_scal_v4sf (rtx, rtx);
extern rtx gen_reduc_smin_scal_v4sf (rtx, rtx);
extern rtx gen_reduc_smax_scal_v64qi (rtx, rtx);
extern rtx gen_reduc_smin_scal_v64qi (rtx, rtx);
extern rtx gen_reduc_smax_scal_v32hi (rtx, rtx);
extern rtx gen_reduc_smin_scal_v32hi (rtx, rtx);
extern rtx gen_reduc_smax_scal_v16si (rtx, rtx);
extern rtx gen_reduc_smin_scal_v16si (rtx, rtx);
extern rtx gen_reduc_smax_scal_v8di (rtx, rtx);
extern rtx gen_reduc_smin_scal_v8di (rtx, rtx);
extern rtx gen_reduc_smax_scal_v16sf (rtx, rtx);
extern rtx gen_reduc_smin_scal_v16sf (rtx, rtx);
extern rtx gen_reduc_smax_scal_v8df (rtx, rtx);
extern rtx gen_reduc_smin_scal_v8df (rtx, rtx);
extern rtx gen_reduc_umax_scal_v16si (rtx, rtx);
extern rtx gen_reduc_umin_scal_v16si (rtx, rtx);
extern rtx gen_reduc_umax_scal_v8di (rtx, rtx);
extern rtx gen_reduc_umin_scal_v8di (rtx, rtx);
extern rtx gen_reduc_umax_scal_v32hi (rtx, rtx);
extern rtx gen_reduc_umin_scal_v32hi (rtx, rtx);
extern rtx gen_reduc_umax_scal_v64qi (rtx, rtx);
extern rtx gen_reduc_umin_scal_v64qi (rtx, rtx);
extern rtx gen_reduc_umax_scal_v32qi (rtx, rtx);
extern rtx gen_reduc_umin_scal_v32qi (rtx, rtx);
extern rtx gen_reduc_umax_scal_v16hi (rtx, rtx);
extern rtx gen_reduc_umin_scal_v16hi (rtx, rtx);
extern rtx gen_reduc_umax_scal_v8si (rtx, rtx);
extern rtx gen_reduc_umin_scal_v8si (rtx, rtx);
extern rtx gen_reduc_umax_scal_v4di (rtx, rtx);
extern rtx gen_reduc_umin_scal_v4di (rtx, rtx);
extern rtx gen_reduc_umin_scal_v8hi (rtx, rtx);
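/* The reduc_{plus,smax,smin,umax,umin}_scal_<mode> generators above
   implement GCC's reduc_*_scal standard names: two operands, a scalar
   destination and the vector source whose elements are combined with
   the named operation.  */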
extern rtx gen_vec_cmpv16sihi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8siqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4siqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv2diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv16sfhi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8sfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4sfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8dfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4dfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv2dfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv64qidi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv16qihi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv32qisi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv32hisi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv16hihi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8hiqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv32qiv32qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv16hiv16hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8siv8si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4div4di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv16qiv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8hiv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4siv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv2div2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv8sfv8si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4dfv4di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv4sfv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpv2dfv2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv16sihi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv8siqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv4siqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv8diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv4diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv2diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv64qidi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv16qihi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv32qisi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv32hisi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv16hihi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv8hiqi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv32qiv32qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv16hiv16hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv8siv8si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv4div4di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv16qiv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv8hiv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv4siv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_cmpuv2div2di (rtx, rtx, rtx, rtx);
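/* vec_cmp<m><n> and vec_cmpu<m><n> (unsigned) take four operands: the
   destination of mode <n>, a comparison-operator rtx, and the two
   <m>-mode vectors being compared.  Destinations spelled qi/hi/si/di
   are AVX-512 mask values, which this GCC version keeps in scalar
   integer modes.  */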
extern rtx gen_vcondv64qiv16sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv32hiv16sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16siv16sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8div16sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16sfv16sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8dfv16sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv64qiv8df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv32hiv8df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16siv8df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8div8df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16sfv8df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8dfv8df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv32qiv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv32qiv4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16hiv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16hiv4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8siv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8siv4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4div8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4div4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8sfv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8sfv4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4dfv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4dfv4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16qiv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16qiv2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8hiv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8hiv2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4siv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4siv2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2div4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2div2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4sfv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4sfv2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2dfv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2dfv2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v16sihi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8siqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4siqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v2diqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v16sfhi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8sfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4sfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8dfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4dfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v2dfqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v64qidi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v16qihi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v32qisi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v32hisi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v16hihi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8hiqi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v32qiv32qi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v16hiv16hi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8siv8si (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4div4di (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v16qiv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8hiv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4siv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v2div2di (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v8sfv8si (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4dfv4di (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v4sfv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vcond_mask_v2dfv2di (rtx, rtx, rtx, rtx);
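/* vcond<m><n> and vcondu<m><n> take six operands: destination, the
   two values selected between, a comparison-code rtx and the two
   operands it compares.  vcond_mask_<m><n> takes four, replacing the
   trailing comparison with a precomputed mask.  */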
extern rtx gen_andv8sf3 (rtx, rtx, rtx);
extern rtx gen_andv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv8sf3 (rtx, rtx, rtx);
extern rtx gen_iorv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv8sf3 (rtx, rtx, rtx);
extern rtx gen_xorv8sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_andv4sf3 (rtx, rtx, rtx);
extern rtx gen_andv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv4sf3 (rtx, rtx, rtx);
extern rtx gen_iorv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv4sf3 (rtx, rtx, rtx);
extern rtx gen_xorv4sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_andv4df3 (rtx, rtx, rtx);
extern rtx gen_andv4df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv4df3 (rtx, rtx, rtx);
extern rtx gen_iorv4df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv4df3 (rtx, rtx, rtx);
extern rtx gen_xorv4df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_andv2df3 (rtx, rtx, rtx);
extern rtx gen_andv2df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv2df3 (rtx, rtx, rtx);
extern rtx gen_iorv2df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv2df3 (rtx, rtx, rtx);
extern rtx gen_xorv2df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_andv16sf3 (rtx, rtx, rtx);
extern rtx gen_andv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv16sf3 (rtx, rtx, rtx);
extern rtx gen_iorv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv16sf3 (rtx, rtx, rtx);
extern rtx gen_xorv16sf3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_andv8df3 (rtx, rtx, rtx);
extern rtx gen_andv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_iorv8df3 (rtx, rtx, rtx);
extern rtx gen_iorv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_xorv8df3 (rtx, rtx, rtx);
extern rtx gen_xorv8df3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_copysignv16sf3 (rtx, rtx, rtx);
extern rtx gen_copysignv8sf3 (rtx, rtx, rtx);
extern rtx gen_copysignv4sf3 (rtx, rtx, rtx);
extern rtx gen_copysignv8df3 (rtx, rtx, rtx);
extern rtx gen_copysignv4df3 (rtx, rtx, rtx);
extern rtx gen_copysignv2df3 (rtx, rtx, rtx);
extern rtx gen_andtf3 (rtx, rtx, rtx);
extern rtx gen_iortf3 (rtx, rtx, rtx);
extern rtx gen_xortf3 (rtx, rtx, rtx);
extern rtx gen_fmasf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmadf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmav4sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmav2df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmav8sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmav4df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmav16sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmav8df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmssf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsdf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsv4sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsv2df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsv8sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsv4df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsv16sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fmsv8df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmasf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmadf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav4sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav2df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav8sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav4df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav16sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmav8df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmssf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsdf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsv4sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsv2df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsv8sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsv4df4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsv16sf4 (rtx, rtx, rtx, rtx);
extern rtx gen_fnmsv8df4 (rtx, rtx, rtx, rtx);
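/* The four-operand FMA generators follow the fma/fms/fnma/fnms
   standard names: for operands (dest, a, b, c) they compute a*b+c,
   a*b-c, -(a*b)+c and -(a*b)-c respectively, each fused with a single
   rounding.  */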
extern rtx gen_fma4i_fmadd_sf (rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_fmadd_df (rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_fmadd_v4sf (rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_fmadd_v2df (rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_fmadd_v8sf (rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_fmadd_v4df (rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_fmadd_v16sf (rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_fmadd_v8df (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v16sf_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v16sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v8sf_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v8sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4sf_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v8df_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmadd_v8df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4df_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v4df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v2df_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmadd_v2df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fmaddsub_v16sf (rtx, rtx, rtx, rtx);
extern rtx gen_fmaddsub_v8sf (rtx, rtx, rtx, rtx);
extern rtx gen_fmaddsub_v4sf (rtx, rtx, rtx, rtx);
extern rtx gen_fmaddsub_v8df (rtx, rtx, rtx, rtx);
extern rtx gen_fmaddsub_v4df (rtx, rtx, rtx, rtx);
extern rtx gen_fmaddsub_v2df (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmaddsub_v16sf_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmaddsub_v16sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v8sf_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v8sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v4sf_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v4sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmaddsub_v8df_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fmaddsub_v8df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v4df_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v4df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v2df_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fmaddsub_v2df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fmai_vmfmadd_v4sf (rtx, rtx, rtx, rtx);
extern rtx gen_fmai_vmfmadd_v4sf_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fmai_vmfmadd_v2df (rtx, rtx, rtx, rtx);
extern rtx gen_fmai_vmfmadd_v2df_round (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_vmfmadd_v4sf (rtx, rtx, rtx, rtx);
extern rtx gen_fma4i_vmfmadd_v2df (rtx, rtx, rtx, rtx);
extern rtx gen_floatunsv16siv16sf2 (rtx, rtx);
extern rtx gen_floatunsv8siv8sf2 (rtx, rtx);
extern rtx gen_floatunsv4siv4sf2 (rtx, rtx);
extern rtx gen_fixuns_truncv16sfv16si2 (rtx, rtx);
extern rtx gen_fixuns_truncv8sfv8si2 (rtx, rtx);
extern rtx gen_fixuns_truncv4sfv4si2 (rtx, rtx);
extern rtx gen_avx_cvtpd2dq256_2 (rtx, rtx);
extern rtx gen_avx_cvttpd2dq256_2 (rtx, rtx);
extern rtx gen_sse2_cvtpd2ps (rtx, rtx);
extern rtx gen_sse2_cvtpd2ps_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_cvtmask2bv64qi (rtx, rtx);
extern rtx gen_avx512vl_cvtmask2bv16qi (rtx, rtx);
extern rtx gen_avx512vl_cvtmask2bv32qi (rtx, rtx);
extern rtx gen_avx512bw_cvtmask2wv32hi (rtx, rtx);
extern rtx gen_avx512vl_cvtmask2wv16hi (rtx, rtx);
extern rtx gen_avx512vl_cvtmask2wv8hi (rtx, rtx);
extern rtx gen_avx512f_cvtmask2dv16si (rtx, rtx);
extern rtx gen_avx512vl_cvtmask2dv8si (rtx, rtx);
extern rtx gen_avx512vl_cvtmask2dv4si (rtx, rtx);
extern rtx gen_avx512f_cvtmask2qv8di (rtx, rtx);
extern rtx gen_avx512vl_cvtmask2qv4di (rtx, rtx);
extern rtx gen_avx512vl_cvtmask2qv2di (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v4sf (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v8sf (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v16sf (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v4sf (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v8sf (rtx, rtx);
extern rtx gen_vec_unpacks_float_hi_v32hi (rtx, rtx);
extern rtx gen_vec_unpacks_float_hi_v16hi (rtx, rtx);
extern rtx gen_vec_unpacks_float_hi_v8hi (rtx, rtx);
extern rtx gen_vec_unpacks_float_lo_v32hi (rtx, rtx);
extern rtx gen_vec_unpacks_float_lo_v16hi (rtx, rtx);
extern rtx gen_vec_unpacks_float_lo_v8hi (rtx, rtx);
extern rtx gen_vec_unpacku_float_hi_v32hi (rtx, rtx);
extern rtx gen_vec_unpacku_float_hi_v16hi (rtx, rtx);
extern rtx gen_vec_unpacku_float_hi_v8hi (rtx, rtx);
extern rtx gen_vec_unpacku_float_lo_v32hi (rtx, rtx);
extern rtx gen_vec_unpacku_float_lo_v16hi (rtx, rtx);
extern rtx gen_vec_unpacku_float_lo_v8hi (rtx, rtx);
extern rtx gen_vec_unpacks_float_hi_v4si (rtx, rtx);
extern rtx gen_vec_unpacks_float_lo_v4si (rtx, rtx);
extern rtx gen_vec_unpacks_float_hi_v8si (rtx, rtx);
extern rtx gen_vec_unpacks_float_lo_v8si (rtx, rtx);
extern rtx gen_vec_unpacks_float_hi_v16si (rtx, rtx);
extern rtx gen_vec_unpacks_float_lo_v16si (rtx, rtx);
extern rtx gen_vec_unpacku_float_hi_v4si (rtx, rtx);
extern rtx gen_vec_unpacku_float_lo_v4si (rtx, rtx);
extern rtx gen_vec_unpacku_float_hi_v8si (rtx, rtx);
extern rtx gen_vec_unpacku_float_hi_v16si (rtx, rtx);
extern rtx gen_vec_unpacku_float_lo_v8si (rtx, rtx);
extern rtx gen_vec_unpacku_float_lo_v16si (rtx, rtx);
extern rtx gen_vec_pack_trunc_v8df (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v4df (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v2df (rtx, rtx, rtx);
extern rtx gen_vec_pack_sfix_trunc_v8df (rtx, rtx, rtx);
extern rtx gen_vec_pack_sfix_trunc_v4df (rtx, rtx, rtx);
extern rtx gen_vec_pack_sfix_trunc_v2df (rtx, rtx, rtx);
extern rtx gen_vec_pack_ufix_trunc_v8df (rtx, rtx, rtx);
extern rtx gen_vec_pack_ufix_trunc_v4df (rtx, rtx, rtx);
extern rtx gen_vec_pack_ufix_trunc_v2df (rtx, rtx, rtx);
extern rtx gen_avx512f_vec_pack_sfix_v8df (rtx, rtx, rtx);
extern rtx gen_vec_pack_sfix_v4df (rtx, rtx, rtx);
extern rtx gen_vec_pack_sfix_v2df (rtx, rtx, rtx);
extern rtx gen_sse_movhlps_exp (rtx, rtx, rtx);
extern rtx gen_sse_movlhps_exp (rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv8sf (rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv8sf (rtx, rtx, rtx);
extern rtx gen_avx_shufps256 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_shufps256_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse_shufps (rtx, rtx, rtx, rtx);
extern rtx gen_sse_shufps_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse_loadhps_exp (rtx, rtx, rtx);
extern rtx gen_sse_loadlps_exp (rtx, rtx, rtx);
extern rtx gen_vec_initv16qi (rtx, rtx);
extern rtx gen_vec_initv8hi (rtx, rtx);
extern rtx gen_vec_initv4si (rtx, rtx);
extern rtx gen_vec_initv2di (rtx, rtx);
extern rtx gen_vec_initv4sf (rtx, rtx);
extern rtx gen_vec_initv2df (rtx, rtx);
extern rtx gen_vec_setv32qi (rtx, rtx, rtx);
extern rtx gen_vec_setv16qi (rtx, rtx, rtx);
extern rtx gen_vec_setv16hi (rtx, rtx, rtx);
extern rtx gen_vec_setv8hi (rtx, rtx, rtx);
extern rtx gen_vec_setv16si (rtx, rtx, rtx);
extern rtx gen_vec_setv8si (rtx, rtx, rtx);
extern rtx gen_vec_setv4si (rtx, rtx, rtx);
extern rtx gen_vec_setv8di (rtx, rtx, rtx);
extern rtx gen_vec_setv4di (rtx, rtx, rtx);
extern rtx gen_vec_setv2di (rtx, rtx, rtx);
extern rtx gen_vec_setv16sf (rtx, rtx, rtx);
extern rtx gen_vec_setv8sf (rtx, rtx, rtx);
extern rtx gen_vec_setv4sf (rtx, rtx, rtx);
extern rtx gen_vec_setv8df (rtx, rtx, rtx);
extern rtx gen_vec_setv4df (rtx, rtx, rtx);
extern rtx gen_vec_setv2df (rtx, rtx, rtx);
extern rtx gen_avx512dq_vextractf64x2_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_vextracti64x2_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vextractf32x4_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vextracti32x4_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_vextractf32x8_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_vextracti32x8_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vextractf64x4_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vextracti64x4_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vextractf128v8si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vextractf128v8sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vextractf128v4di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vextractf128v4df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_vextractf128v32qi (rtx, rtx, rtx);
extern rtx gen_avx_vextractf128v16hi (rtx, rtx, rtx);
extern rtx gen_avx_vextractf128v8si (rtx, rtx, rtx);
extern rtx gen_avx_vextractf128v4di (rtx, rtx, rtx);
extern rtx gen_avx_vextractf128v8sf (rtx, rtx, rtx);
extern rtx gen_avx_vextractf128v4df (rtx, rtx, rtx);
extern rtx gen_vec_extractv64qi (rtx, rtx, rtx);
extern rtx gen_vec_extractv32qi (rtx, rtx, rtx);
extern rtx gen_vec_extractv16qi (rtx, rtx, rtx);
extern rtx gen_vec_extractv32hi (rtx, rtx, rtx);
extern rtx gen_vec_extractv16hi (rtx, rtx, rtx);
extern rtx gen_vec_extractv8hi (rtx, rtx, rtx);
extern rtx gen_vec_extractv16si (rtx, rtx, rtx);
extern rtx gen_vec_extractv8si (rtx, rtx, rtx);
extern rtx gen_vec_extractv4si (rtx, rtx, rtx);
extern rtx gen_vec_extractv8di (rtx, rtx, rtx);
extern rtx gen_vec_extractv4di (rtx, rtx, rtx);
extern rtx gen_vec_extractv2di (rtx, rtx, rtx);
extern rtx gen_vec_extractv16sf (rtx, rtx, rtx);
extern rtx gen_vec_extractv8sf (rtx, rtx, rtx);
extern rtx gen_vec_extractv4sf (rtx, rtx, rtx);
extern rtx gen_vec_extractv8df (rtx, rtx, rtx);
extern rtx gen_vec_extractv4df (rtx, rtx, rtx);
extern rtx gen_vec_extractv2df (rtx, rtx, rtx);
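/* vec_set<mode> stores a scalar into one element of a vector
   (operands: vector, value, element index); vec_extract<mode> is the
   inverse (scalar destination, vector, index).  An illustrative
   sketch, with hypothetical `vec', `val' and `scal' rtxes:

     emit_insn (gen_vec_setv4sf (vec, val, GEN_INT (2)));
     emit_insn (gen_vec_extractv4sf (scal, vec, GEN_INT (2)));
*/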
extern rtx gen_vec_interleave_highv4df (rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv2df (rtx, rtx, rtx);
extern rtx gen_avx512f_movddup512 (rtx, rtx);
extern rtx gen_avx512f_movddup512_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_unpcklpd512 (rtx, rtx, rtx);
extern rtx gen_avx512f_unpcklpd512_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_movddup256 (rtx, rtx);
extern rtx gen_avx_movddup256_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx_unpcklpd256 (rtx, rtx, rtx);
extern rtx gen_avx_unpcklpd256_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv4df (rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv2df (rtx, rtx, rtx);
extern rtx gen_avx512f_vternlogv16si_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv8si_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv4si_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vternlogv8di_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv4di_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vternlogv2di_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shufps512_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv16sf_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv16sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv8sf_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv8sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4sf_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv8df_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_fixupimmv8df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4df_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv4df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv2df_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_fixupimmv2df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv4sf_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv4sf_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv2df_maskz (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_sfixupimmv2df_maskz_round (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shufpd512_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_shufpd256 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_shufpd256_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_shufpd (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_shufpd_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_loadhpd_exp (rtx, rtx, rtx);
extern rtx gen_sse2_loadlpd_exp (rtx, rtx, rtx);
extern rtx gen_avx512f_ss_truncatev16siv16qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_truncatev16siv16qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_us_truncatev16siv16qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_ss_truncatev16siv16hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_truncatev16siv16hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_us_truncatev16siv16hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_ss_truncatev8div8si2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_truncatev8div8si2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_us_truncatev8div8si2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_ss_truncatev8div8hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_truncatev8div8hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512f_us_truncatev8div8hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512bw_ss_truncatev32hiv32qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512bw_truncatev32hiv32qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512bw_us_truncatev32hiv32qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev4div4si2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev4div4si2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev4div4si2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev8siv8hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev8siv8hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev8siv8hi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_ss_truncatev16hiv16qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_truncatev16hiv16qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_avx512vl_us_truncatev16hiv16qi2_mask_store (rtx, rtx, rtx);
extern rtx gen_negv64qi2 (rtx, rtx);
extern rtx gen_negv32qi2 (rtx, rtx);
extern rtx gen_negv16qi2 (rtx, rtx);
extern rtx gen_negv32hi2 (rtx, rtx);
extern rtx gen_negv16hi2 (rtx, rtx);
extern rtx gen_negv8hi2 (rtx, rtx);
extern rtx gen_negv16si2 (rtx, rtx);
extern rtx gen_negv8si2 (rtx, rtx);
extern rtx gen_negv4si2 (rtx, rtx);
extern rtx gen_negv8di2 (rtx, rtx);
extern rtx gen_negv4di2 (rtx, rtx);
extern rtx gen_negv2di2 (rtx, rtx);
extern rtx gen_addv64qi3 (rtx, rtx, rtx);
extern rtx gen_subv64qi3 (rtx, rtx, rtx);
extern rtx gen_addv32qi3 (rtx, rtx, rtx);
extern rtx gen_subv32qi3 (rtx, rtx, rtx);
extern rtx gen_addv16qi3 (rtx, rtx, rtx);
extern rtx gen_subv16qi3 (rtx, rtx, rtx);
extern rtx gen_addv32hi3 (rtx, rtx, rtx);
extern rtx gen_subv32hi3 (rtx, rtx, rtx);
extern rtx gen_addv16hi3 (rtx, rtx, rtx);
extern rtx gen_subv16hi3 (rtx, rtx, rtx);
extern rtx gen_addv8hi3 (rtx, rtx, rtx);
extern rtx gen_subv8hi3 (rtx, rtx, rtx);
extern rtx gen_addv16si3 (rtx, rtx, rtx);
extern rtx gen_subv16si3 (rtx, rtx, rtx);
extern rtx gen_addv8si3 (rtx, rtx, rtx);
extern rtx gen_subv8si3 (rtx, rtx, rtx);
extern rtx gen_addv4si3 (rtx, rtx, rtx);
extern rtx gen_subv4si3 (rtx, rtx, rtx);
extern rtx gen_addv8di3 (rtx, rtx, rtx);
extern rtx gen_subv8di3 (rtx, rtx, rtx);
extern rtx gen_addv4di3 (rtx, rtx, rtx);
extern rtx gen_subv4di3 (rtx, rtx, rtx);
extern rtx gen_addv2di3 (rtx, rtx, rtx);
extern rtx gen_subv2di3 (rtx, rtx, rtx);
extern rtx gen_addv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_addv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_subv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
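/* The _mask arithmetic variants append two operands to the unmasked
   signature: a merge source supplying result elements whose mask bit
   is clear, and the AVX-512 write-mask itself; hence the step from
   (rtx, rtx, rtx) to (rtx, rtx, rtx, rtx, rtx) above.  */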
extern rtx gen_avx512bw_ssaddv64qi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_ssaddv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_usaddv64qi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_usaddv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_sssubv64qi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_sssubv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_ussubv64qi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_ussubv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_ssaddv32qi3 (rtx, rtx, rtx);
extern rtx gen_avx2_ssaddv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_usaddv32qi3 (rtx, rtx, rtx);
extern rtx gen_avx2_usaddv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_sssubv32qi3 (rtx, rtx, rtx);
extern rtx gen_avx2_sssubv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_ussubv32qi3 (rtx, rtx, rtx);
extern rtx gen_avx2_ussubv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_ssaddv16qi3 (rtx, rtx, rtx);
extern rtx gen_sse2_ssaddv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_usaddv16qi3 (rtx, rtx, rtx);
extern rtx gen_sse2_usaddv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_sssubv16qi3 (rtx, rtx, rtx);
extern rtx gen_sse2_sssubv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_ussubv16qi3 (rtx, rtx, rtx);
extern rtx gen_sse2_ussubv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_ssaddv32hi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_ssaddv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_usaddv32hi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_usaddv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_sssubv32hi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_sssubv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_ussubv32hi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_ussubv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_ssaddv16hi3 (rtx, rtx, rtx);
extern rtx gen_avx2_ssaddv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_usaddv16hi3 (rtx, rtx, rtx);
extern rtx gen_avx2_usaddv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_sssubv16hi3 (rtx, rtx, rtx);
extern rtx gen_avx2_sssubv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_ussubv16hi3 (rtx, rtx, rtx);
extern rtx gen_avx2_ussubv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_ssaddv8hi3 (rtx, rtx, rtx);
extern rtx gen_sse2_ssaddv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_usaddv8hi3 (rtx, rtx, rtx);
extern rtx gen_sse2_usaddv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_sssubv8hi3 (rtx, rtx, rtx);
extern rtx gen_sse2_sssubv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_ussubv8hi3 (rtx, rtx, rtx);
extern rtx gen_sse2_ussubv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv64qi3 (rtx, rtx, rtx);
extern rtx gen_mulv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv32qi3 (rtx, rtx, rtx);
extern rtx gen_mulv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv16qi3 (rtx, rtx, rtx);
extern rtx gen_mulv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv32hi3 (rtx, rtx, rtx);
extern rtx gen_mulv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv16hi3 (rtx, rtx, rtx);
extern rtx gen_mulv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv8hi3 (rtx, rtx, rtx);
extern rtx gen_mulv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smulv32hi3_highpart (rtx, rtx, rtx);
extern rtx gen_smulv32hi3_highpart_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_umulv32hi3_highpart (rtx, rtx, rtx);
extern rtx gen_umulv32hi3_highpart_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smulv16hi3_highpart (rtx, rtx, rtx);
extern rtx gen_smulv16hi3_highpart_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_umulv16hi3_highpart (rtx, rtx, rtx);
extern rtx gen_umulv16hi3_highpart_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smulv8hi3_highpart (rtx, rtx, rtx);
extern rtx gen_smulv8hi3_highpart_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_umulv8hi3_highpart (rtx, rtx, rtx);
extern rtx gen_umulv8hi3_highpart_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_even_v16si (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_even_v16si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_even_v8si (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_even_v8si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_even_v4si (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_even_v4si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_even_v16si (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_even_v16si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_even_v8si (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_even_v8si_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_mulv2siv2di3 (rtx, rtx, rtx);
extern rtx gen_sse4_1_mulv2siv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_pmaddwd (rtx, rtx, rtx);
extern rtx gen_sse2_pmaddwd (rtx, rtx, rtx);
extern rtx gen_mulv16si3 (rtx, rtx, rtx);
extern rtx gen_mulv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv8si3 (rtx, rtx, rtx);
extern rtx gen_mulv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv4si3 (rtx, rtx, rtx);
extern rtx gen_mulv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_mulv8di3 (rtx, rtx, rtx);
extern rtx gen_mulv4di3 (rtx, rtx, rtx);
extern rtx gen_mulv2di3 (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_hi_v32qi (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_hi_v32qi (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_hi_v16qi (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_hi_v16qi (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_hi_v16hi (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_hi_v16hi (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_hi_v8hi (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_hi_v8hi (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_hi_v8si (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_hi_v8si (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_hi_v4si (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_hi_v4si (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_lo_v32qi (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_lo_v32qi (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_lo_v16qi (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_lo_v16qi (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_lo_v16hi (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_lo_v16hi (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_lo_v8hi (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_lo_v8hi (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_lo_v8si (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_lo_v8si (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_lo_v4si (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_lo_v4si (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_even_v4si (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_odd_v16si (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_odd_v16si (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_odd_v8si (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_odd_v8si (rtx, rtx, rtx);
extern rtx gen_vec_widen_smult_odd_v4si (rtx, rtx, rtx);
extern rtx gen_vec_widen_umult_odd_v4si (rtx, rtx, rtx);
extern rtx gen_sdot_prodv32hi (rtx, rtx, rtx, rtx);
extern rtx gen_sdot_prodv16hi (rtx, rtx, rtx, rtx);
extern rtx gen_sdot_prodv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_sdot_prodv4si (rtx, rtx, rtx, rtx);
extern rtx gen_usadv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_usadv32qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_shl_v16qi (rtx, rtx, rtx);
extern rtx gen_vec_shl_v8hi (rtx, rtx, rtx);
extern rtx gen_vec_shl_v4si (rtx, rtx, rtx);
extern rtx gen_vec_shl_v2di (rtx, rtx, rtx);
extern rtx gen_vec_shr_v16qi (rtx, rtx, rtx);
extern rtx gen_vec_shr_v8hi (rtx, rtx, rtx);
extern rtx gen_vec_shr_v4si (rtx, rtx, rtx);
extern rtx gen_vec_shr_v2di (rtx, rtx, rtx);
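/* vec_shl_<mode> and vec_shr_<mode> shift the whole vector register
   left or right as a single value, with operand 2 giving the shift
   amount; on x86 these correspond to the whole-register byte
   shifts.  */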
extern rtx gen_smaxv32qi3 (rtx, rtx, rtx);
extern rtx gen_sminv32qi3 (rtx, rtx, rtx);
extern rtx gen_umaxv32qi3 (rtx, rtx, rtx);
extern rtx gen_uminv32qi3 (rtx, rtx, rtx);
extern rtx gen_smaxv16hi3 (rtx, rtx, rtx);
extern rtx gen_sminv16hi3 (rtx, rtx, rtx);
extern rtx gen_umaxv16hi3 (rtx, rtx, rtx);
extern rtx gen_uminv16hi3 (rtx, rtx, rtx);
extern rtx gen_smaxv8si3 (rtx, rtx, rtx);
extern rtx gen_sminv8si3 (rtx, rtx, rtx);
extern rtx gen_umaxv8si3 (rtx, rtx, rtx);
extern rtx gen_uminv8si3 (rtx, rtx, rtx);
extern rtx gen_smaxv64qi3 (rtx, rtx, rtx);
extern rtx gen_sminv64qi3 (rtx, rtx, rtx);
extern rtx gen_umaxv64qi3 (rtx, rtx, rtx);
extern rtx gen_uminv64qi3 (rtx, rtx, rtx);
extern rtx gen_smaxv32hi3 (rtx, rtx, rtx);
extern rtx gen_sminv32hi3 (rtx, rtx, rtx);
extern rtx gen_umaxv32hi3 (rtx, rtx, rtx);
extern rtx gen_uminv32hi3 (rtx, rtx, rtx);
extern rtx gen_smaxv16si3 (rtx, rtx, rtx);
extern rtx gen_sminv16si3 (rtx, rtx, rtx);
extern rtx gen_umaxv16si3 (rtx, rtx, rtx);
extern rtx gen_uminv16si3 (rtx, rtx, rtx);
extern rtx gen_smaxv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_umaxv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_uminv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_umaxv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_uminv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_umaxv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_uminv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_umaxv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_uminv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_umaxv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_uminv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sminv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_umaxv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_uminv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_smaxv8di3 (rtx, rtx, rtx);
extern rtx gen_sminv8di3 (rtx, rtx, rtx);
extern rtx gen_umaxv8di3 (rtx, rtx, rtx);
extern rtx gen_uminv8di3 (rtx, rtx, rtx);
extern rtx gen_smaxv4di3 (rtx, rtx, rtx);
extern rtx gen_sminv4di3 (rtx, rtx, rtx);
extern rtx gen_umaxv4di3 (rtx, rtx, rtx);
extern rtx gen_uminv4di3 (rtx, rtx, rtx);
extern rtx gen_smaxv2di3 (rtx, rtx, rtx);
extern rtx gen_sminv2di3 (rtx, rtx, rtx);
extern rtx gen_umaxv2di3 (rtx, rtx, rtx);
extern rtx gen_uminv2di3 (rtx, rtx, rtx);
extern rtx gen_smaxv16qi3 (rtx, rtx, rtx);
extern rtx gen_sminv16qi3 (rtx, rtx, rtx);
extern rtx gen_smaxv8hi3 (rtx, rtx, rtx);
extern rtx gen_sminv8hi3 (rtx, rtx, rtx);
extern rtx gen_smaxv4si3 (rtx, rtx, rtx);
extern rtx gen_sminv4si3 (rtx, rtx, rtx);
extern rtx gen_umaxv16qi3 (rtx, rtx, rtx);
extern rtx gen_uminv16qi3 (rtx, rtx, rtx);
extern rtx gen_umaxv8hi3 (rtx, rtx, rtx);
extern rtx gen_uminv8hi3 (rtx, rtx, rtx);
extern rtx gen_umaxv4si3 (rtx, rtx, rtx);
extern rtx gen_uminv4si3 (rtx, rtx, rtx);
extern rtx gen_avx2_eqv32qi3 (rtx, rtx, rtx);
extern rtx gen_avx2_eqv16hi3 (rtx, rtx, rtx);
extern rtx gen_avx2_eqv8si3 (rtx, rtx, rtx);
extern rtx gen_avx2_eqv4di3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_eqv64qi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_eqv64qi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv16qi3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv16qi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv32qi3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv32qi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_eqv32hi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_eqv32hi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv16hi3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv16hi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv8hi3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv8hi3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_eqv16si3 (rtx, rtx, rtx);
extern rtx gen_avx512f_eqv16si3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv8si3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv8si3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv4si3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv4si3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_eqv8di3 (rtx, rtx, rtx);
extern rtx gen_avx512f_eqv8di3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv4di3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv4di3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv2di3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_eqv2di3_mask (rtx, rtx, rtx, rtx);
extern rtx gen_sse2_eqv16qi3 (rtx, rtx, rtx);
extern rtx gen_sse2_eqv8hi3 (rtx, rtx, rtx);
extern rtx gen_sse2_eqv4si3 (rtx, rtx, rtx);
extern rtx gen_sse4_1_eqv2di3 (rtx, rtx, rtx);
extern rtx gen_vcondv64qiv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv32hiv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16siv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8div64qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16sfv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8dfv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv64qiv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv32hiv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16siv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8div32hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16sfv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8dfv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv64qiv16si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv32hiv16si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16siv16si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8div16si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16sfv16si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8dfv16si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv64qiv8di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv32hiv8di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16siv8di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8div8di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16sfv8di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8dfv8di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv32qiv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16hiv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8siv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4div32qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8sfv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4dfv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv32qiv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16hiv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8siv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4div16hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8sfv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4dfv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv32qiv8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16hiv8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8siv8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4div8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8sfv8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4dfv8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv32qiv4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16hiv4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8siv4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4div4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8sfv4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4dfv4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16qiv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8hiv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4siv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2div16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4sfv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2dfv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16qiv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8hiv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4siv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2div8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4sfv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2dfv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv16qiv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv8hiv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4siv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2div4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv4sfv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2dfv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2div2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcondv2dfv2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv64qiv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv32hiv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv16siv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8div64qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv16sfv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8dfv64qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv64qiv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv32hiv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv16siv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8div32hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv16sfv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8dfv32hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv64qiv16si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv32hiv16si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv16siv16si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8div16si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv16sfv16si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8dfv16si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv64qiv8di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv32hiv8di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv16siv8di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8div8di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv16sfv8di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8dfv8di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv32qiv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv16hiv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8siv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4div32qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8sfv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4dfv32qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv32qiv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv16hiv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8siv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4div16hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8sfv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4dfv16hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv32qiv8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv16hiv8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8siv8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4div8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8sfv8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4dfv8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv32qiv4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv16hiv4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8siv4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4div4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8sfv4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4dfv4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv16qiv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8hiv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4siv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv2div16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4sfv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv2dfv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv16qiv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8hiv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4siv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv2div8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4sfv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv2dfv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv16qiv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv8hiv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4siv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv2div4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv4sfv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv2dfv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv2div2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vconduv2dfv2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv2df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv32qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv16hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv8si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv4di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv8sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv4df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv16sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv8df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv16si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv8di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv32hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_permv64qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv4sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv4si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv2df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv2di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv16qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv8hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv8sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv4df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv8si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv4di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv32qi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv16hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv16si (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv8di (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv16sf (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv8df (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv32hi (rtx, rtx, rtx, rtx);
extern rtx gen_vec_perm_constv64qi (rtx, rtx, rtx, rtx);
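/* vec_perm<mode> takes a destination, two input vectors and a
   selector vector chosen at run time; vec_perm_const<mode> is the
   constant-selector form, which lets the backend refuse or pick a
   cheaper single-instruction shuffle when the permutation is known at
   compile time.  */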
extern rtx gen_one_cmplv16si2 (rtx, rtx);
extern rtx gen_one_cmplv8di2 (rtx, rtx);
extern rtx gen_one_cmplv64qi2 (rtx, rtx);
extern rtx gen_one_cmplv32qi2 (rtx, rtx);
extern rtx gen_one_cmplv16qi2 (rtx, rtx);
extern rtx gen_one_cmplv32hi2 (rtx, rtx);
extern rtx gen_one_cmplv16hi2 (rtx, rtx);
extern rtx gen_one_cmplv8hi2 (rtx, rtx);
extern rtx gen_one_cmplv8si2 (rtx, rtx);
extern rtx gen_one_cmplv4si2 (rtx, rtx);
extern rtx gen_one_cmplv4di2 (rtx, rtx);
extern rtx gen_one_cmplv2di2 (rtx, rtx);
extern rtx gen_avx512bw_andnotv64qi3 (rtx, rtx, rtx);
extern rtx gen_avx2_andnotv32qi3 (rtx, rtx, rtx);
extern rtx gen_sse2_andnotv16qi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_andnotv32hi3 (rtx, rtx, rtx);
extern rtx gen_avx2_andnotv16hi3 (rtx, rtx, rtx);
extern rtx gen_sse2_andnotv8hi3 (rtx, rtx, rtx);
extern rtx gen_avx512f_andnotv16si3 (rtx, rtx, rtx);
extern rtx gen_avx2_andnotv8si3 (rtx, rtx, rtx);
extern rtx gen_sse2_andnotv4si3 (rtx, rtx, rtx);
extern rtx gen_avx512f_andnotv8di3 (rtx, rtx, rtx);
extern rtx gen_avx2_andnotv4di3 (rtx, rtx, rtx);
extern rtx gen_sse2_andnotv2di3 (rtx, rtx, rtx);
extern rtx gen_avx512f_andnotv16si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_andnotv8si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_andnotv4si3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_andnotv8di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_andnotv4di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_andnotv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_andnotv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_andnotv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_andnotv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_andnotv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_andnotv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_andnotv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_andv16si3 (rtx, rtx, rtx);
extern rtx gen_iorv16si3 (rtx, rtx, rtx);
extern rtx gen_xorv16si3 (rtx, rtx, rtx);
extern rtx gen_andv8di3 (rtx, rtx, rtx);
extern rtx gen_iorv8di3 (rtx, rtx, rtx);
extern rtx gen_xorv8di3 (rtx, rtx, rtx);
extern rtx gen_andv64qi3 (rtx, rtx, rtx);
extern rtx gen_iorv64qi3 (rtx, rtx, rtx);
extern rtx gen_xorv64qi3 (rtx, rtx, rtx);
extern rtx gen_andv32qi3 (rtx, rtx, rtx);
extern rtx gen_iorv32qi3 (rtx, rtx, rtx);
extern rtx gen_xorv32qi3 (rtx, rtx, rtx);
extern rtx gen_andv16qi3 (rtx, rtx, rtx);
extern rtx gen_iorv16qi3 (rtx, rtx, rtx);
extern rtx gen_xorv16qi3 (rtx, rtx, rtx);
extern rtx gen_andv32hi3 (rtx, rtx, rtx);
extern rtx gen_iorv32hi3 (rtx, rtx, rtx);
extern rtx gen_xorv32hi3 (rtx, rtx, rtx);
extern rtx gen_andv16hi3 (rtx, rtx, rtx);
extern rtx gen_iorv16hi3 (rtx, rtx, rtx);
extern rtx gen_xorv16hi3 (rtx, rtx, rtx);
extern rtx gen_andv8hi3 (rtx, rtx, rtx);
extern rtx gen_iorv8hi3 (rtx, rtx, rtx);
extern rtx gen_xorv8hi3 (rtx, rtx, rtx);
extern rtx gen_andv8si3 (rtx, rtx, rtx);
extern rtx gen_iorv8si3 (rtx, rtx, rtx);
extern rtx gen_xorv8si3 (rtx, rtx, rtx);
extern rtx gen_andv4si3 (rtx, rtx, rtx);
extern rtx gen_iorv4si3 (rtx, rtx, rtx);
extern rtx gen_xorv4si3 (rtx, rtx, rtx);
extern rtx gen_andv4di3 (rtx, rtx, rtx);
extern rtx gen_iorv4di3 (rtx, rtx, rtx);
extern rtx gen_xorv4di3 (rtx, rtx, rtx);
extern rtx gen_andv2di3 (rtx, rtx, rtx);
extern rtx gen_iorv2di3 (rtx, rtx, rtx);
extern rtx gen_xorv2di3 (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v32hi (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v16hi (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v8hi (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v16si (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v8si (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v4si (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v8di (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v4di (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_v2di (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_qi (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_hi (rtx, rtx, rtx);
extern rtx gen_vec_pack_trunc_si (rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv32qi (rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv16hi (rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv8si (rtx, rtx, rtx);
extern rtx gen_vec_interleave_highv4di (rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv32qi (rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv16hi (rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv8si (rtx, rtx, rtx);
extern rtx gen_vec_interleave_lowv4di (rtx, rtx, rtx);
extern rtx gen_avx512dq_vinsertf64x2_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_vinserti64x2_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vinsertf32x4_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vinserti32x4_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_vinsertf32x8_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_vinserti32x8_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vinsertf64x4_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vinserti64x4_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_shuf_i64x2_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512dq_shuf_f64x2_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shuf_f64x2_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shuf_i64x2_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_shuf_i32x4_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_shuf_f32x4_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shuf_f32x4_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_shuf_i32x4_mask (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_pshufdv3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_pshufdv3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_pshufdv3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_pshufd_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_pshufd (rtx, rtx, rtx);
extern rtx gen_avx512vl_pshuflwv3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_pshuflwv3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_pshuflw_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_pshuflw (rtx, rtx, rtx);
extern rtx gen_avx2_pshufhwv3 (rtx, rtx, rtx);
extern rtx gen_avx512vl_pshufhwv3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_pshufhw_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_pshufhw (rtx, rtx, rtx);
extern rtx gen_sse2_loadd (rtx, rtx);
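/* vec_unpacks/vec_unpacku: sign- or zero-extend the low or high half of a
   vector into a destination vector of wider elements. */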
extern rtx gen_vec_unpacks_lo_v64qi (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v32qi (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v16qi (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v32hi (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v16hi (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v8hi (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v16si (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v8si (rtx, rtx);
extern rtx gen_vec_unpacks_lo_v4si (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v64qi (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v32qi (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v16qi (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v32hi (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v16hi (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v8hi (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v16si (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v8si (rtx, rtx);
extern rtx gen_vec_unpacks_hi_v4si (rtx, rtx);
extern rtx gen_vec_unpacku_lo_v64qi (rtx, rtx);
extern rtx gen_vec_unpacku_lo_v32qi (rtx, rtx);
extern rtx gen_vec_unpacku_lo_v16qi (rtx, rtx);
extern rtx gen_vec_unpacku_lo_v32hi (rtx, rtx);
extern rtx gen_vec_unpacku_lo_v16hi (rtx, rtx);
extern rtx gen_vec_unpacku_lo_v8hi (rtx, rtx);
extern rtx gen_vec_unpacku_lo_v16si (rtx, rtx);
extern rtx gen_vec_unpacku_lo_v8si (rtx, rtx);
extern rtx gen_vec_unpacku_lo_v4si (rtx, rtx);
extern rtx gen_vec_unpacks_lo_hi (rtx, rtx);
extern rtx gen_vec_unpacks_lo_si (rtx, rtx);
extern rtx gen_vec_unpacks_lo_di (rtx, rtx);
extern rtx gen_vec_unpacku_hi_v64qi (rtx, rtx);
extern rtx gen_vec_unpacku_hi_v32qi (rtx, rtx);
extern rtx gen_vec_unpacku_hi_v16qi (rtx, rtx);
extern rtx gen_vec_unpacku_hi_v32hi (rtx, rtx);
extern rtx gen_vec_unpacku_hi_v16hi (rtx, rtx);
extern rtx gen_vec_unpacku_hi_v8hi (rtx, rtx);
extern rtx gen_vec_unpacku_hi_v16si (rtx, rtx);
extern rtx gen_vec_unpacku_hi_v8si (rtx, rtx);
extern rtx gen_vec_unpacku_hi_v4si (rtx, rtx);
extern rtx gen_vec_unpacks_hi_hi (rtx, rtx);
extern rtx gen_vec_unpacks_hi_si (rtx, rtx);
extern rtx gen_vec_unpacks_hi_di (rtx, rtx);
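/* Unsigned byte/word averaging (pavgb/pavgw) for SSE2, AVX2 and AVX512BW. */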
extern rtx gen_avx512bw_uavgv64qi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_uavgv64qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_uavgv32qi3 (rtx, rtx, rtx);
extern rtx gen_avx2_uavgv32qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_uavgv16qi3 (rtx, rtx, rtx);
extern rtx gen_sse2_uavgv16qi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_uavgv32hi3 (rtx, rtx, rtx);
extern rtx gen_avx512bw_uavgv32hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_uavgv16hi3 (rtx, rtx, rtx);
extern rtx gen_avx2_uavgv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_uavgv8hi3 (rtx, rtx, rtx);
extern rtx gen_sse2_uavgv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_sse2_maskmovdqu (rtx, rtx, rtx);
extern rtx gen_ssse3_pmulhrswv4hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ssse3_pmulhrswv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_pmulhrswv16hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_ssse3_pmulhrswv4hi3 (rtx, rtx, rtx);
extern rtx gen_ssse3_pmulhrswv8hi3 (rtx, rtx, rtx);
extern rtx gen_avx2_pmulhrswv16hi3 (rtx, rtx, rtx);
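/* Vector absolute value (pabs-style) across all integer element widths. */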
extern rtx gen_absv64qi2 (rtx, rtx);
extern rtx gen_absv32qi2 (rtx, rtx);
extern rtx gen_absv16qi2 (rtx, rtx);
extern rtx gen_absv32hi2 (rtx, rtx);
extern rtx gen_absv16hi2 (rtx, rtx);
extern rtx gen_absv8hi2 (rtx, rtx);
extern rtx gen_absv16si2 (rtx, rtx);
extern rtx gen_absv8si2 (rtx, rtx);
extern rtx gen_absv4si2 (rtx, rtx);
extern rtx gen_absv8di2 (rtx, rtx);
extern rtx gen_absv4di2 (rtx, rtx);
extern rtx gen_absv2di2 (rtx, rtx);
extern rtx gen_avx2_pblendw (rtx, rtx, rtx, rtx);
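/* Vector rounding expanders; the _sfix forms additionally convert the
   rounded result to a signed integer vector. */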
extern rtx gen_avx_roundps_sfix256 (rtx, rtx, rtx);
extern rtx gen_sse4_1_roundps_sfix (rtx, rtx, rtx);
extern rtx gen_avx512f_roundps512 (rtx, rtx, rtx);
extern rtx gen_avx512f_roundpd512 (rtx, rtx, rtx);
extern rtx gen_avx512f_roundps512_sfix (rtx, rtx, rtx);
extern rtx gen_avx512f_roundpd_vec_pack_sfix512 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_roundpd_vec_pack_sfix256 (rtx, rtx, rtx, rtx);
extern rtx gen_sse4_1_roundpd_vec_pack_sfix (rtx, rtx, rtx, rtx);
extern rtx gen_roundv16sf2 (rtx, rtx);
extern rtx gen_roundv8sf2 (rtx, rtx);
extern rtx gen_roundv4sf2 (rtx, rtx);
extern rtx gen_roundv8df2 (rtx, rtx);
extern rtx gen_roundv4df2 (rtx, rtx);
extern rtx gen_roundv2df2 (rtx, rtx);
extern rtx gen_roundv16sf2_sfix (rtx, rtx);
extern rtx gen_roundv8sf2_sfix (rtx, rtx);
extern rtx gen_roundv4sf2_sfix (rtx, rtx);
extern rtx gen_roundv8df2_vec_pack_sfix (rtx, rtx, rtx);
extern rtx gen_roundv4df2_vec_pack_sfix (rtx, rtx, rtx);
extern rtx gen_roundv2df2_vec_pack_sfix (rtx, rtx, rtx);
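/* AVX-512PF prefetch expanders for gather/scatter access patterns. */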
extern rtx gen_avx512pf_gatherpfv16sisf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512pf_gatherpfv8disf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512pf_gatherpfv8sidf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512pf_gatherpfv8didf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512pf_scatterpfv16sisf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512pf_scatterpfv8disf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512pf_scatterpfv8sidf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512pf_scatterpfv8didf (rtx, rtx, rtx, rtx, rtx);
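/* Rotate and shift expanders: the v-prefixed forms (vrotl, vlshr, vashl,
   ...) take a per-element shift/rotate count, the plain forms one count
   applied to every lane. */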
extern rtx gen_rotlv16qi3 (rtx, rtx, rtx);
extern rtx gen_rotlv8hi3 (rtx, rtx, rtx);
extern rtx gen_rotlv4si3 (rtx, rtx, rtx);
extern rtx gen_rotlv2di3 (rtx, rtx, rtx);
extern rtx gen_rotrv16qi3 (rtx, rtx, rtx);
extern rtx gen_rotrv8hi3 (rtx, rtx, rtx);
extern rtx gen_rotrv4si3 (rtx, rtx, rtx);
extern rtx gen_rotrv2di3 (rtx, rtx, rtx);
extern rtx gen_vrotrv16qi3 (rtx, rtx, rtx);
extern rtx gen_vrotrv8hi3 (rtx, rtx, rtx);
extern rtx gen_vrotrv4si3 (rtx, rtx, rtx);
extern rtx gen_vrotrv2di3 (rtx, rtx, rtx);
extern rtx gen_vrotlv16qi3 (rtx, rtx, rtx);
extern rtx gen_vrotlv8hi3 (rtx, rtx, rtx);
extern rtx gen_vrotlv4si3 (rtx, rtx, rtx);
extern rtx gen_vrotlv2di3 (rtx, rtx, rtx);
extern rtx gen_vlshrv16qi3 (rtx, rtx, rtx);
extern rtx gen_vlshrv8hi3 (rtx, rtx, rtx);
extern rtx gen_vlshrv4si3 (rtx, rtx, rtx);
extern rtx gen_vlshrv2di3 (rtx, rtx, rtx);
extern rtx gen_vlshrv16si3 (rtx, rtx, rtx);
extern rtx gen_vlshrv8di3 (rtx, rtx, rtx);
extern rtx gen_vlshrv8si3 (rtx, rtx, rtx);
extern rtx gen_vlshrv4di3 (rtx, rtx, rtx);
extern rtx gen_vashrv8hi3 (rtx, rtx, rtx);
extern rtx gen_vashrv8hi3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vashrv16qi3 (rtx, rtx, rtx);
extern rtx gen_vashrv2di3 (rtx, rtx, rtx);
extern rtx gen_vashrv2di3_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vashrv4si3 (rtx, rtx, rtx);
extern rtx gen_vashrv16si3 (rtx, rtx, rtx);
extern rtx gen_vashrv8si3 (rtx, rtx, rtx);
extern rtx gen_vashlv16qi3 (rtx, rtx, rtx);
extern rtx gen_vashlv8hi3 (rtx, rtx, rtx);
extern rtx gen_vashlv4si3 (rtx, rtx, rtx);
extern rtx gen_vashlv2di3 (rtx, rtx, rtx);
extern rtx gen_vashlv16si3 (rtx, rtx, rtx);
extern rtx gen_vashlv8di3 (rtx, rtx, rtx);
extern rtx gen_vashlv8si3 (rtx, rtx, rtx);
extern rtx gen_vashlv4di3 (rtx, rtx, rtx);
extern rtx gen_ashlv64qi3 (rtx, rtx, rtx);
extern rtx gen_lshrv64qi3 (rtx, rtx, rtx);
extern rtx gen_ashrv64qi3 (rtx, rtx, rtx);
extern rtx gen_ashlv32qi3 (rtx, rtx, rtx);
extern rtx gen_lshrv32qi3 (rtx, rtx, rtx);
extern rtx gen_ashrv32qi3 (rtx, rtx, rtx);
extern rtx gen_ashlv16qi3 (rtx, rtx, rtx);
extern rtx gen_lshrv16qi3 (rtx, rtx, rtx);
extern rtx gen_ashrv16qi3 (rtx, rtx, rtx);
extern rtx gen_ashrv2di3 (rtx, rtx, rtx);
extern rtx gen_xop_vmfrczv4sf2 (rtx, rtx);
extern rtx gen_xop_vmfrczv2df2 (rtx, rtx);
extern rtx gen_avx_vzeroall (void);
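/* Full-vector permutation expanders (vperm, vpermil, and the two-table
   vpermi2var/vpermt2var forms); _maskz variants zero the masked-off lanes. */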
extern rtx gen_avx2_permv4di (rtx, rtx, rtx);
extern rtx gen_avx2_permv4df (rtx, rtx, rtx);
extern rtx gen_avx512f_permv8di (rtx, rtx, rtx);
extern rtx gen_avx512f_permv8df (rtx, rtx, rtx);
extern rtx gen_avx512vl_permv4di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_permv4df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_permv8di_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_permv8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermilv8df (rtx, rtx, rtx);
extern rtx gen_avx512f_vpermilv8df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_vpermilv4df (rtx, rtx, rtx);
extern rtx gen_avx_vpermilv4df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_vpermilv2df (rtx, rtx, rtx);
extern rtx gen_avx_vpermilv2df_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermilv16sf (rtx, rtx, rtx);
extern rtx gen_avx512f_vpermilv16sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_vpermilv8sf (rtx, rtx, rtx);
extern rtx gen_avx_vpermilv8sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_vpermilv4sf (rtx, rtx, rtx);
extern rtx gen_avx_vpermilv4sf_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermi2varv16si3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermi2varv16sf3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermi2varv8di3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermi2varv8df3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv8si3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv8sf3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv4di3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv4df3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv4si3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv4sf3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv2di3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv2df3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vpermi2varv64qi3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv16qi3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv32qi3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv8hi3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermi2varv16hi3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vpermi2varv32hi3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermt2varv16si3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermt2varv16sf3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermt2varv8di3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_vpermt2varv8df3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv8si3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv8sf3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv4di3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv4df3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv4si3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv4sf3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv2di3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv2df3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vpermt2varv64qi3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv16qi3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv32qi3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv8hi3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vpermt2varv16hi3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512bw_vpermt2varv32hi3_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_vperm2f128v8si3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_vperm2f128v8sf3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx_vperm2f128v4df3 (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vinsertv8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vinsertv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vinsertv4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_vinsertv4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx_vinsertf128v32qi (rtx, rtx, rtx, rtx);
extern rtx gen_avx_vinsertf128v16hi (rtx, rtx, rtx, rtx);
extern rtx gen_avx_vinsertf128v8si (rtx, rtx, rtx, rtx);
extern rtx gen_avx_vinsertf128v4di (rtx, rtx, rtx, rtx);
extern rtx gen_avx_vinsertf128v8sf (rtx, rtx, rtx, rtx);
extern rtx gen_avx_vinsertf128v4df (rtx, rtx, rtx, rtx);
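/* Masked load/store expanders.  The mode pair in the name encodes data mode
   then mask mode (e.g. maskloadv8sfqi = V8SF data with a QImode mask). */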
extern rtx gen_maskloadv4sfv4si (rtx, rtx, rtx);
extern rtx gen_maskloadv2dfv2di (rtx, rtx, rtx);
extern rtx gen_maskloadv8sfv8si (rtx, rtx, rtx);
extern rtx gen_maskloadv4dfv4di (rtx, rtx, rtx);
extern rtx gen_maskloadv4siv4si (rtx, rtx, rtx);
extern rtx gen_maskloadv2div2di (rtx, rtx, rtx);
extern rtx gen_maskloadv8siv8si (rtx, rtx, rtx);
extern rtx gen_maskloadv4div4di (rtx, rtx, rtx);
extern rtx gen_maskloadv16sihi (rtx, rtx, rtx);
extern rtx gen_maskloadv8siqi (rtx, rtx, rtx);
extern rtx gen_maskloadv4siqi (rtx, rtx, rtx);
extern rtx gen_maskloadv8diqi (rtx, rtx, rtx);
extern rtx gen_maskloadv4diqi (rtx, rtx, rtx);
extern rtx gen_maskloadv2diqi (rtx, rtx, rtx);
extern rtx gen_maskloadv16sfhi (rtx, rtx, rtx);
extern rtx gen_maskloadv8sfqi (rtx, rtx, rtx);
extern rtx gen_maskloadv4sfqi (rtx, rtx, rtx);
extern rtx gen_maskloadv8dfqi (rtx, rtx, rtx);
extern rtx gen_maskloadv4dfqi (rtx, rtx, rtx);
extern rtx gen_maskloadv2dfqi (rtx, rtx, rtx);
extern rtx gen_maskloadv64qidi (rtx, rtx, rtx);
extern rtx gen_maskloadv16qihi (rtx, rtx, rtx);
extern rtx gen_maskloadv32qisi (rtx, rtx, rtx);
extern rtx gen_maskloadv32hisi (rtx, rtx, rtx);
extern rtx gen_maskloadv16hihi (rtx, rtx, rtx);
extern rtx gen_maskloadv8hiqi (rtx, rtx, rtx);
extern rtx gen_maskstorev4sfv4si (rtx, rtx, rtx);
extern rtx gen_maskstorev2dfv2di (rtx, rtx, rtx);
extern rtx gen_maskstorev8sfv8si (rtx, rtx, rtx);
extern rtx gen_maskstorev4dfv4di (rtx, rtx, rtx);
extern rtx gen_maskstorev4siv4si (rtx, rtx, rtx);
extern rtx gen_maskstorev2div2di (rtx, rtx, rtx);
extern rtx gen_maskstorev8siv8si (rtx, rtx, rtx);
extern rtx gen_maskstorev4div4di (rtx, rtx, rtx);
extern rtx gen_maskstorev16sihi (rtx, rtx, rtx);
extern rtx gen_maskstorev8siqi (rtx, rtx, rtx);
extern rtx gen_maskstorev4siqi (rtx, rtx, rtx);
extern rtx gen_maskstorev8diqi (rtx, rtx, rtx);
extern rtx gen_maskstorev4diqi (rtx, rtx, rtx);
extern rtx gen_maskstorev2diqi (rtx, rtx, rtx);
extern rtx gen_maskstorev16sfhi (rtx, rtx, rtx);
extern rtx gen_maskstorev8sfqi (rtx, rtx, rtx);
extern rtx gen_maskstorev4sfqi (rtx, rtx, rtx);
extern rtx gen_maskstorev8dfqi (rtx, rtx, rtx);
extern rtx gen_maskstorev4dfqi (rtx, rtx, rtx);
extern rtx gen_maskstorev2dfqi (rtx, rtx, rtx);
extern rtx gen_maskstorev64qidi (rtx, rtx, rtx);
extern rtx gen_maskstorev16qihi (rtx, rtx, rtx);
extern rtx gen_maskstorev32qisi (rtx, rtx, rtx);
extern rtx gen_maskstorev32hisi (rtx, rtx, rtx);
extern rtx gen_maskstorev16hihi (rtx, rtx, rtx);
extern rtx gen_maskstorev8hiqi (rtx, rtx, rtx);
extern rtx gen_cbranchv4si4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchv2di4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchv8si4 (rtx, rtx, rtx, rtx);
extern rtx gen_cbranchv4di4 (rtx, rtx, rtx, rtx);
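/* vec_init: build a vector register from a parallel of element values. */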
extern rtx gen_vec_initv32qi (rtx, rtx);
extern rtx gen_vec_initv16hi (rtx, rtx);
extern rtx gen_vec_initv8si (rtx, rtx);
extern rtx gen_vec_initv4di (rtx, rtx);
extern rtx gen_vec_initv8sf (rtx, rtx);
extern rtx gen_vec_initv4df (rtx, rtx);
extern rtx gen_vec_initv16si (rtx, rtx);
extern rtx gen_vec_initv16sf (rtx, rtx);
extern rtx gen_vec_initv8di (rtx, rtx);
extern rtx gen_vec_initv8df (rtx, rtx);
extern rtx gen_vec_initv32hi (rtx, rtx);
extern rtx gen_vec_initv64qi (rtx, rtx);
extern rtx gen_vcvtps2ph_mask (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vcvtps2ph (rtx, rtx, rtx);
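/* AVX2/AVX-512 gather and scatter expanders; the si/di infix gives the
   index element mode (SImode or DImode indices). */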
extern rtx gen_avx2_gathersiv2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_gathersiv2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_gathersiv4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_gathersiv4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_gathersiv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_gathersiv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_gathersiv8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_gathersiv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_gatherdiv2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_gatherdiv2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_gatherdiv4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_gatherdiv4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_gatherdiv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_gatherdiv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_gatherdiv8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx2_gatherdiv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_gathersiv16si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_gathersiv16sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_gathersiv8di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_gathersiv8df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gathersiv8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gathersiv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gathersiv4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gathersiv4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gathersiv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gathersiv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gathersiv2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gathersiv2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_gatherdiv16si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_gatherdiv16sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_gatherdiv8di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_gatherdiv8df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gatherdiv8si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gatherdiv8sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gatherdiv4di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gatherdiv4df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gatherdiv4si (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gatherdiv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gatherdiv2di (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_gatherdiv2df (rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_scattersiv16si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_scattersiv16sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_scattersiv8di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_scattersiv8df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scattersiv8si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scattersiv8sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scattersiv4di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scattersiv4df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scattersiv4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scattersiv4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scattersiv2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scattersiv2df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_scatterdiv16si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_scatterdiv16sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_scatterdiv8di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_scatterdiv8df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scatterdiv8si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scatterdiv8sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scatterdiv4di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scatterdiv4df (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scatterdiv4si (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scatterdiv4sf (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scatterdiv2di (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_scatterdiv2df (rtx, rtx, rtx, rtx, rtx);
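/* AVX-512 expand-load (vexpandps/vpexpandd-style) expanders; the _maskz
   forms zero the lanes not selected by the mask. */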
extern rtx gen_avx512f_expandv16si_maskz (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_expandv16sf_maskz (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_expandv8di_maskz (rtx, rtx, rtx, rtx);
extern rtx gen_avx512f_expandv8df_maskz (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_expandv8si_maskz (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_expandv8sf_maskz (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_expandv4di_maskz (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_expandv4df_maskz (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_expandv4si_maskz (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_expandv4sf_maskz (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_expandv2di_maskz (rtx, rtx, rtx, rtx);
extern rtx gen_avx512vl_expandv2df_maskz (rtx, rtx, rtx, rtx);
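/* AVX-512 IFMA 52-bit multiply-add expanders; the "vpamdd52" spelling
   (for the vpmadd52 instructions) appears to mirror the generated pattern
   names in GCC's sse.md. */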
extern rtx gen_vpamdd52huqv8di_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52huqv4di_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52huqv2di_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52luqv8di_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52luqv4di_maskz (rtx, rtx, rtx, rtx, rtx);
extern rtx gen_vpamdd52luqv2di_maskz (rtx, rtx, rtx, rtx, rtx);
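/* Memory fences and the atomic load/store/compare-and-swap expanders. */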
extern rtx gen_sse2_lfence (void);
extern rtx gen_sse_sfence (void);
extern rtx gen_sse2_mfence (void);
extern rtx gen_mem_thread_fence (rtx);
extern rtx gen_atomic_loadqi (rtx, rtx, rtx);
extern rtx gen_atomic_loadhi (rtx, rtx, rtx);
extern rtx gen_atomic_loadsi (rtx, rtx, rtx);
extern rtx gen_atomic_loaddi (rtx, rtx, rtx);
extern rtx gen_atomic_storeqi (rtx, rtx, rtx);
extern rtx gen_atomic_storehi (rtx, rtx, rtx);
extern rtx gen_atomic_storesi (rtx, rtx, rtx);
extern rtx gen_atomic_storedi (rtx, rtx, rtx);
extern rtx gen_atomic_compare_and_swapqi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_atomic_compare_and_swaphi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_atomic_compare_and_swapsi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_atomic_compare_and_swapdi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
extern rtx gen_atomic_compare_and_swapti (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
#endif /* GCC_INSN_FLAGS_H */
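A minimal, hypothetical sketch of how a plugin pass running at RTL expansion
time might call one of these generator functions. The operands a and b are
placeholders (not part of this header); gen_reg_rtx, emit_insn and NULL_RTX
are standard GCC internals.

/* Emit dest = a & b on V4SImode vectors via the andv4si3 expander.
   Expanders that FAIL return NULL_RTX, so check before emitting. */
rtx dest = gen_reg_rtx (V4SImode);
rtx insn = gen_andv4si3 (dest, a, b);
if (insn != NULL_RTX)
  emit_insn (insn);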